1// Code generated from _gen/generic.rules using 'go generate'; DO NOT EDIT.
2
3package ssa
4
5import "math"
6import "cmd/internal/obj"
7import "cmd/compile/internal/types"
8import "cmd/compile/internal/ir"
9
// rewriteValuegeneric dispatches v to the per-opcode rewrite function
// generated from _gen/generic.rules. It reports whether any rewrite rule
// matched and mutated v in place. Opcodes with no generic rules fall
// through the switch and return false unchanged.
func rewriteValuegeneric(v *Value) bool {
	switch v.Op {
	case OpAdd16:
		return rewriteValuegeneric_OpAdd16(v)
	case OpAdd32:
		return rewriteValuegeneric_OpAdd32(v)
	case OpAdd32F:
		return rewriteValuegeneric_OpAdd32F(v)
	case OpAdd64:
		return rewriteValuegeneric_OpAdd64(v)
	case OpAdd64F:
		return rewriteValuegeneric_OpAdd64F(v)
	case OpAdd8:
		return rewriteValuegeneric_OpAdd8(v)
	case OpAddPtr:
		return rewriteValuegeneric_OpAddPtr(v)
	case OpAnd16:
		return rewriteValuegeneric_OpAnd16(v)
	case OpAnd32:
		return rewriteValuegeneric_OpAnd32(v)
	case OpAnd64:
		return rewriteValuegeneric_OpAnd64(v)
	case OpAnd8:
		return rewriteValuegeneric_OpAnd8(v)
	case OpAndB:
		return rewriteValuegeneric_OpAndB(v)
	case OpArraySelect:
		return rewriteValuegeneric_OpArraySelect(v)
	case OpCeil:
		return rewriteValuegeneric_OpCeil(v)
	case OpCom16:
		return rewriteValuegeneric_OpCom16(v)
	case OpCom32:
		return rewriteValuegeneric_OpCom32(v)
	case OpCom64:
		return rewriteValuegeneric_OpCom64(v)
	case OpCom8:
		return rewriteValuegeneric_OpCom8(v)
	case OpConstInterface:
		return rewriteValuegeneric_OpConstInterface(v)
	case OpConstSlice:
		return rewriteValuegeneric_OpConstSlice(v)
	case OpConstString:
		return rewriteValuegeneric_OpConstString(v)
	case OpConvert:
		return rewriteValuegeneric_OpConvert(v)
	case OpCtz16:
		return rewriteValuegeneric_OpCtz16(v)
	case OpCtz32:
		return rewriteValuegeneric_OpCtz32(v)
	case OpCtz64:
		return rewriteValuegeneric_OpCtz64(v)
	case OpCtz8:
		return rewriteValuegeneric_OpCtz8(v)
	case OpCvt32Fto32:
		return rewriteValuegeneric_OpCvt32Fto32(v)
	case OpCvt32Fto64:
		return rewriteValuegeneric_OpCvt32Fto64(v)
	case OpCvt32Fto64F:
		return rewriteValuegeneric_OpCvt32Fto64F(v)
	case OpCvt32to32F:
		return rewriteValuegeneric_OpCvt32to32F(v)
	case OpCvt32to64F:
		return rewriteValuegeneric_OpCvt32to64F(v)
	case OpCvt64Fto32:
		return rewriteValuegeneric_OpCvt64Fto32(v)
	case OpCvt64Fto32F:
		return rewriteValuegeneric_OpCvt64Fto32F(v)
	case OpCvt64Fto64:
		return rewriteValuegeneric_OpCvt64Fto64(v)
	case OpCvt64to32F:
		return rewriteValuegeneric_OpCvt64to32F(v)
	case OpCvt64to64F:
		return rewriteValuegeneric_OpCvt64to64F(v)
	case OpCvtBoolToUint8:
		return rewriteValuegeneric_OpCvtBoolToUint8(v)
	case OpDiv16:
		return rewriteValuegeneric_OpDiv16(v)
	case OpDiv16u:
		return rewriteValuegeneric_OpDiv16u(v)
	case OpDiv32:
		return rewriteValuegeneric_OpDiv32(v)
	case OpDiv32F:
		return rewriteValuegeneric_OpDiv32F(v)
	case OpDiv32u:
		return rewriteValuegeneric_OpDiv32u(v)
	case OpDiv64:
		return rewriteValuegeneric_OpDiv64(v)
	case OpDiv64F:
		return rewriteValuegeneric_OpDiv64F(v)
	case OpDiv64u:
		return rewriteValuegeneric_OpDiv64u(v)
	case OpDiv8:
		return rewriteValuegeneric_OpDiv8(v)
	case OpDiv8u:
		return rewriteValuegeneric_OpDiv8u(v)
	case OpEq16:
		return rewriteValuegeneric_OpEq16(v)
	case OpEq32:
		return rewriteValuegeneric_OpEq32(v)
	case OpEq32F:
		return rewriteValuegeneric_OpEq32F(v)
	case OpEq64:
		return rewriteValuegeneric_OpEq64(v)
	case OpEq64F:
		return rewriteValuegeneric_OpEq64F(v)
	case OpEq8:
		return rewriteValuegeneric_OpEq8(v)
	case OpEqB:
		return rewriteValuegeneric_OpEqB(v)
	case OpEqInter:
		return rewriteValuegeneric_OpEqInter(v)
	case OpEqPtr:
		return rewriteValuegeneric_OpEqPtr(v)
	case OpEqSlice:
		return rewriteValuegeneric_OpEqSlice(v)
	case OpFloor:
		return rewriteValuegeneric_OpFloor(v)
	case OpIMake:
		return rewriteValuegeneric_OpIMake(v)
	case OpInterLECall:
		return rewriteValuegeneric_OpInterLECall(v)
	case OpIsInBounds:
		return rewriteValuegeneric_OpIsInBounds(v)
	case OpIsNonNil:
		return rewriteValuegeneric_OpIsNonNil(v)
	case OpIsSliceInBounds:
		return rewriteValuegeneric_OpIsSliceInBounds(v)
	case OpLeq16:
		return rewriteValuegeneric_OpLeq16(v)
	case OpLeq16U:
		return rewriteValuegeneric_OpLeq16U(v)
	case OpLeq32:
		return rewriteValuegeneric_OpLeq32(v)
	case OpLeq32F:
		return rewriteValuegeneric_OpLeq32F(v)
	case OpLeq32U:
		return rewriteValuegeneric_OpLeq32U(v)
	case OpLeq64:
		return rewriteValuegeneric_OpLeq64(v)
	case OpLeq64F:
		return rewriteValuegeneric_OpLeq64F(v)
	case OpLeq64U:
		return rewriteValuegeneric_OpLeq64U(v)
	case OpLeq8:
		return rewriteValuegeneric_OpLeq8(v)
	case OpLeq8U:
		return rewriteValuegeneric_OpLeq8U(v)
	case OpLess16:
		return rewriteValuegeneric_OpLess16(v)
	case OpLess16U:
		return rewriteValuegeneric_OpLess16U(v)
	case OpLess32:
		return rewriteValuegeneric_OpLess32(v)
	case OpLess32F:
		return rewriteValuegeneric_OpLess32F(v)
	case OpLess32U:
		return rewriteValuegeneric_OpLess32U(v)
	case OpLess64:
		return rewriteValuegeneric_OpLess64(v)
	case OpLess64F:
		return rewriteValuegeneric_OpLess64F(v)
	case OpLess64U:
		return rewriteValuegeneric_OpLess64U(v)
	case OpLess8:
		return rewriteValuegeneric_OpLess8(v)
	case OpLess8U:
		return rewriteValuegeneric_OpLess8U(v)
	case OpLoad:
		return rewriteValuegeneric_OpLoad(v)
	case OpLsh16x16:
		return rewriteValuegeneric_OpLsh16x16(v)
	case OpLsh16x32:
		return rewriteValuegeneric_OpLsh16x32(v)
	case OpLsh16x64:
		return rewriteValuegeneric_OpLsh16x64(v)
	case OpLsh16x8:
		return rewriteValuegeneric_OpLsh16x8(v)
	case OpLsh32x16:
		return rewriteValuegeneric_OpLsh32x16(v)
	case OpLsh32x32:
		return rewriteValuegeneric_OpLsh32x32(v)
	case OpLsh32x64:
		return rewriteValuegeneric_OpLsh32x64(v)
	case OpLsh32x8:
		return rewriteValuegeneric_OpLsh32x8(v)
	case OpLsh64x16:
		return rewriteValuegeneric_OpLsh64x16(v)
	case OpLsh64x32:
		return rewriteValuegeneric_OpLsh64x32(v)
	case OpLsh64x64:
		return rewriteValuegeneric_OpLsh64x64(v)
	case OpLsh64x8:
		return rewriteValuegeneric_OpLsh64x8(v)
	case OpLsh8x16:
		return rewriteValuegeneric_OpLsh8x16(v)
	case OpLsh8x32:
		return rewriteValuegeneric_OpLsh8x32(v)
	case OpLsh8x64:
		return rewriteValuegeneric_OpLsh8x64(v)
	case OpLsh8x8:
		return rewriteValuegeneric_OpLsh8x8(v)
	case OpMod16:
		return rewriteValuegeneric_OpMod16(v)
	case OpMod16u:
		return rewriteValuegeneric_OpMod16u(v)
	case OpMod32:
		return rewriteValuegeneric_OpMod32(v)
	case OpMod32u:
		return rewriteValuegeneric_OpMod32u(v)
	case OpMod64:
		return rewriteValuegeneric_OpMod64(v)
	case OpMod64u:
		return rewriteValuegeneric_OpMod64u(v)
	case OpMod8:
		return rewriteValuegeneric_OpMod8(v)
	case OpMod8u:
		return rewriteValuegeneric_OpMod8u(v)
	case OpMove:
		return rewriteValuegeneric_OpMove(v)
	case OpMul16:
		return rewriteValuegeneric_OpMul16(v)
	case OpMul32:
		return rewriteValuegeneric_OpMul32(v)
	case OpMul32F:
		return rewriteValuegeneric_OpMul32F(v)
	case OpMul64:
		return rewriteValuegeneric_OpMul64(v)
	case OpMul64F:
		return rewriteValuegeneric_OpMul64F(v)
	case OpMul8:
		return rewriteValuegeneric_OpMul8(v)
	case OpNeg16:
		return rewriteValuegeneric_OpNeg16(v)
	case OpNeg32:
		return rewriteValuegeneric_OpNeg32(v)
	case OpNeg32F:
		return rewriteValuegeneric_OpNeg32F(v)
	case OpNeg64:
		return rewriteValuegeneric_OpNeg64(v)
	case OpNeg64F:
		return rewriteValuegeneric_OpNeg64F(v)
	case OpNeg8:
		return rewriteValuegeneric_OpNeg8(v)
	case OpNeq16:
		return rewriteValuegeneric_OpNeq16(v)
	case OpNeq32:
		return rewriteValuegeneric_OpNeq32(v)
	case OpNeq32F:
		return rewriteValuegeneric_OpNeq32F(v)
	case OpNeq64:
		return rewriteValuegeneric_OpNeq64(v)
	case OpNeq64F:
		return rewriteValuegeneric_OpNeq64F(v)
	case OpNeq8:
		return rewriteValuegeneric_OpNeq8(v)
	case OpNeqB:
		return rewriteValuegeneric_OpNeqB(v)
	case OpNeqInter:
		return rewriteValuegeneric_OpNeqInter(v)
	case OpNeqPtr:
		return rewriteValuegeneric_OpNeqPtr(v)
	case OpNeqSlice:
		return rewriteValuegeneric_OpNeqSlice(v)
	case OpNilCheck:
		return rewriteValuegeneric_OpNilCheck(v)
	case OpNot:
		return rewriteValuegeneric_OpNot(v)
	case OpOffPtr:
		return rewriteValuegeneric_OpOffPtr(v)
	case OpOr16:
		return rewriteValuegeneric_OpOr16(v)
	case OpOr32:
		return rewriteValuegeneric_OpOr32(v)
	case OpOr64:
		return rewriteValuegeneric_OpOr64(v)
	case OpOr8:
		return rewriteValuegeneric_OpOr8(v)
	case OpOrB:
		return rewriteValuegeneric_OpOrB(v)
	case OpPhi:
		return rewriteValuegeneric_OpPhi(v)
	case OpPtrIndex:
		return rewriteValuegeneric_OpPtrIndex(v)
	case OpRotateLeft16:
		return rewriteValuegeneric_OpRotateLeft16(v)
	case OpRotateLeft32:
		return rewriteValuegeneric_OpRotateLeft32(v)
	case OpRotateLeft64:
		return rewriteValuegeneric_OpRotateLeft64(v)
	case OpRotateLeft8:
		return rewriteValuegeneric_OpRotateLeft8(v)
	case OpRound32F:
		return rewriteValuegeneric_OpRound32F(v)
	case OpRound64F:
		return rewriteValuegeneric_OpRound64F(v)
	case OpRoundToEven:
		return rewriteValuegeneric_OpRoundToEven(v)
	case OpRsh16Ux16:
		return rewriteValuegeneric_OpRsh16Ux16(v)
	case OpRsh16Ux32:
		return rewriteValuegeneric_OpRsh16Ux32(v)
	case OpRsh16Ux64:
		return rewriteValuegeneric_OpRsh16Ux64(v)
	case OpRsh16Ux8:
		return rewriteValuegeneric_OpRsh16Ux8(v)
	case OpRsh16x16:
		return rewriteValuegeneric_OpRsh16x16(v)
	case OpRsh16x32:
		return rewriteValuegeneric_OpRsh16x32(v)
	case OpRsh16x64:
		return rewriteValuegeneric_OpRsh16x64(v)
	case OpRsh16x8:
		return rewriteValuegeneric_OpRsh16x8(v)
	case OpRsh32Ux16:
		return rewriteValuegeneric_OpRsh32Ux16(v)
	case OpRsh32Ux32:
		return rewriteValuegeneric_OpRsh32Ux32(v)
	case OpRsh32Ux64:
		return rewriteValuegeneric_OpRsh32Ux64(v)
	case OpRsh32Ux8:
		return rewriteValuegeneric_OpRsh32Ux8(v)
	case OpRsh32x16:
		return rewriteValuegeneric_OpRsh32x16(v)
	case OpRsh32x32:
		return rewriteValuegeneric_OpRsh32x32(v)
	case OpRsh32x64:
		return rewriteValuegeneric_OpRsh32x64(v)
	case OpRsh32x8:
		return rewriteValuegeneric_OpRsh32x8(v)
	case OpRsh64Ux16:
		return rewriteValuegeneric_OpRsh64Ux16(v)
	case OpRsh64Ux32:
		return rewriteValuegeneric_OpRsh64Ux32(v)
	case OpRsh64Ux64:
		return rewriteValuegeneric_OpRsh64Ux64(v)
	case OpRsh64Ux8:
		return rewriteValuegeneric_OpRsh64Ux8(v)
	case OpRsh64x16:
		return rewriteValuegeneric_OpRsh64x16(v)
	case OpRsh64x32:
		return rewriteValuegeneric_OpRsh64x32(v)
	case OpRsh64x64:
		return rewriteValuegeneric_OpRsh64x64(v)
	case OpRsh64x8:
		return rewriteValuegeneric_OpRsh64x8(v)
	case OpRsh8Ux16:
		return rewriteValuegeneric_OpRsh8Ux16(v)
	case OpRsh8Ux32:
		return rewriteValuegeneric_OpRsh8Ux32(v)
	case OpRsh8Ux64:
		return rewriteValuegeneric_OpRsh8Ux64(v)
	case OpRsh8Ux8:
		return rewriteValuegeneric_OpRsh8Ux8(v)
	case OpRsh8x16:
		return rewriteValuegeneric_OpRsh8x16(v)
	case OpRsh8x32:
		return rewriteValuegeneric_OpRsh8x32(v)
	case OpRsh8x64:
		return rewriteValuegeneric_OpRsh8x64(v)
	case OpRsh8x8:
		return rewriteValuegeneric_OpRsh8x8(v)
	case OpSelect0:
		return rewriteValuegeneric_OpSelect0(v)
	case OpSelect1:
		return rewriteValuegeneric_OpSelect1(v)
	case OpSelectN:
		return rewriteValuegeneric_OpSelectN(v)
	case OpSignExt16to32:
		return rewriteValuegeneric_OpSignExt16to32(v)
	case OpSignExt16to64:
		return rewriteValuegeneric_OpSignExt16to64(v)
	case OpSignExt32to64:
		return rewriteValuegeneric_OpSignExt32to64(v)
	case OpSignExt8to16:
		return rewriteValuegeneric_OpSignExt8to16(v)
	case OpSignExt8to32:
		return rewriteValuegeneric_OpSignExt8to32(v)
	case OpSignExt8to64:
		return rewriteValuegeneric_OpSignExt8to64(v)
	case OpSliceCap:
		return rewriteValuegeneric_OpSliceCap(v)
	case OpSliceLen:
		return rewriteValuegeneric_OpSliceLen(v)
	case OpSlicePtr:
		return rewriteValuegeneric_OpSlicePtr(v)
	case OpSlicemask:
		return rewriteValuegeneric_OpSlicemask(v)
	case OpSqrt:
		return rewriteValuegeneric_OpSqrt(v)
	case OpStaticCall:
		return rewriteValuegeneric_OpStaticCall(v)
	case OpStaticLECall:
		return rewriteValuegeneric_OpStaticLECall(v)
	case OpStore:
		return rewriteValuegeneric_OpStore(v)
	case OpStringLen:
		return rewriteValuegeneric_OpStringLen(v)
	case OpStringPtr:
		return rewriteValuegeneric_OpStringPtr(v)
	case OpStructSelect:
		return rewriteValuegeneric_OpStructSelect(v)
	case OpSub16:
		return rewriteValuegeneric_OpSub16(v)
	case OpSub32:
		return rewriteValuegeneric_OpSub32(v)
	case OpSub32F:
		return rewriteValuegeneric_OpSub32F(v)
	case OpSub64:
		return rewriteValuegeneric_OpSub64(v)
	case OpSub64F:
		return rewriteValuegeneric_OpSub64F(v)
	case OpSub8:
		return rewriteValuegeneric_OpSub8(v)
	case OpTrunc:
		return rewriteValuegeneric_OpTrunc(v)
	case OpTrunc16to8:
		return rewriteValuegeneric_OpTrunc16to8(v)
	case OpTrunc32to16:
		return rewriteValuegeneric_OpTrunc32to16(v)
	case OpTrunc32to8:
		return rewriteValuegeneric_OpTrunc32to8(v)
	case OpTrunc64to16:
		return rewriteValuegeneric_OpTrunc64to16(v)
	case OpTrunc64to32:
		return rewriteValuegeneric_OpTrunc64to32(v)
	case OpTrunc64to8:
		return rewriteValuegeneric_OpTrunc64to8(v)
	case OpXor16:
		return rewriteValuegeneric_OpXor16(v)
	case OpXor32:
		return rewriteValuegeneric_OpXor32(v)
	case OpXor64:
		return rewriteValuegeneric_OpXor64(v)
	case OpXor8:
		return rewriteValuegeneric_OpXor8(v)
	case OpZero:
		return rewriteValuegeneric_OpZero(v)
	case OpZeroExt16to32:
		return rewriteValuegeneric_OpZeroExt16to32(v)
	case OpZeroExt16to64:
		return rewriteValuegeneric_OpZeroExt16to64(v)
	case OpZeroExt32to64:
		return rewriteValuegeneric_OpZeroExt32to64(v)
	case OpZeroExt8to16:
		return rewriteValuegeneric_OpZeroExt8to16(v)
	case OpZeroExt8to32:
		return rewriteValuegeneric_OpZeroExt8to32(v)
	case OpZeroExt8to64:
		return rewriteValuegeneric_OpZeroExt8to64(v)
	}
	// No generic rewrite rules exist for this opcode.
	return false
}
// rewriteValuegeneric_OpAdd16 applies the generic rewrite rules for Add16.
// Each "// match:" block below is one rule from _gen/generic.rules; rules are
// tried in order, and the first one that matches rewrites v in place and
// returns true.
//
// Because Add16 is commutative, each rule wraps its matcher in a
// two-iteration loop (for _i0 := 0; _i0 <= 1; ...) that swaps v_0 and v_1
// between iterations to try both operand orders. After the second iteration
// the swap has been undone, so subsequent rules observe the operands in
// their original order. Nested _i1/_i2 loops do the same for commutative
// sub-expressions.
func rewriteValuegeneric_OpAdd16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Add16 (Const16 [c]) (Const16 [d]))
	// result: (Const16 [c+d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpConst16 {
				continue
			}
			d := auxIntToInt16(v_1.AuxInt)
			v.reset(OpConst16)
			// c+d wraps in int16, matching Add16's run-time semantics.
			v.AuxInt = int16ToAuxInt(c + d)
			return true
		}
		break
	}
	// match: (Add16 <t> (Mul16 x y) (Mul16 x z))
	// result: (Mul16 x (Add16 <t> y z))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpMul16 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				if v_1.Op != OpMul16 {
					continue
				}
				_ = v_1.Args[1]
				v_1_0 := v_1.Args[0]
				v_1_1 := v_1.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
					if x != v_1_0 {
						continue
					}
					z := v_1_1
					v.reset(OpMul16)
					v0 := b.NewValue0(v.Pos, OpAdd16, t)
					v0.AddArg2(y, z)
					v.AddArg2(x, v0)
					return true
				}
			}
		}
		break
	}
	// match: (Add16 (Const16 [0]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Add16 x (Neg16 y))
	// result: (Sub16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpNeg16 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpSub16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add16 (Com16 x) x)
	// result: (Const16 [-1])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom16 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(-1)
			return true
		}
		break
	}
	// match: (Add16 (Sub16 x t) (Add16 t y))
	// result: (Add16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub16 {
				continue
			}
			t := v_0.Args[1]
			x := v_0.Args[0]
			if v_1.Op != OpAdd16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if t != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAdd16)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (Add16 (Const16 [1]) (Com16 x))
	// result: (Neg16 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 1 || v_1.Op != OpCom16 {
				continue
			}
			x := v_1.Args[0]
			v.reset(OpNeg16)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Add16 x (Sub16 y x))
	// result: y
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpSub16 {
				continue
			}
			_ = v_1.Args[1]
			y := v_1.Args[0]
			if x != v_1.Args[1] {
				continue
			}
			v.copyOf(y)
			return true
		}
		break
	}
	// match: (Add16 x (Add16 y (Sub16 z x)))
	// result: (Add16 y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAdd16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				y := v_1_0
				if v_1_1.Op != OpSub16 {
					continue
				}
				_ = v_1_1.Args[1]
				z := v_1_1.Args[0]
				if x != v_1_1.Args[1] {
					continue
				}
				v.reset(OpAdd16)
				v.AddArg2(y, z)
				return true
			}
		}
		break
	}
	// match: (Add16 (Add16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Add16 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAdd16 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst16 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				// The cond prevents re-association from looping forever:
				// it only fires when hoisting the constant makes progress.
				if !(z.Op != OpConst16 && x.Op != OpConst16) {
					continue
				}
				v.reset(OpAdd16)
				v0 := b.NewValue0(v.Pos, OpAdd16, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Add16 (Sub16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Sub16 <t> x z))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub16 {
				continue
			}
			z := v_0.Args[1]
			i := v_0.Args[0]
			if i.Op != OpConst16 {
				continue
			}
			t := i.Type
			x := v_1
			if !(z.Op != OpConst16 && x.Op != OpConst16) {
				continue
			}
			v.reset(OpAdd16)
			v0 := b.NewValue0(v.Pos, OpSub16, t)
			v0.AddArg2(x, z)
			v.AddArg2(i, v0)
			return true
		}
		break
	}
	// match: (Add16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
	// result: (Add16 (Const16 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpAdd16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				// Both constants must have the same type for folding.
				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt16(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAdd16)
				v0 := b.NewValue0(v.Pos, OpConst16, t)
				v0.AuxInt = int16ToAuxInt(c + d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Add16 (Const16 <t> [c]) (Sub16 (Const16 <t> [d]) x))
	// result: (Sub16 (Const16 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpSub16 {
				continue
			}
			x := v_1.Args[1]
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpConst16 || v_1_0.Type != t {
				continue
			}
			d := auxIntToInt16(v_1_0.AuxInt)
			v.reset(OpSub16)
			v0 := b.NewValue0(v.Pos, OpConst16, t)
			v0.AuxInt = int16ToAuxInt(c + d)
			v.AddArg2(v0, x)
			return true
		}
		break
	}
	// match: (Add16 (Lsh16x64 x z:(Const64 <t> [c])) (Rsh16Ux64 x (Const64 [d])))
	// cond: c < 16 && d == 16-c && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLsh16x64 {
				continue
			}
			_ = v_0.Args[1]
			x := v_0.Args[0]
			z := v_0.Args[1]
			if z.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(z.AuxInt)
			if v_1.Op != OpRsh16Ux64 {
				continue
			}
			_ = v_1.Args[1]
			if x != v_1.Args[0] {
				continue
			}
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1_1.AuxInt)
			// (x<<c)|(x>>(16-c)) over the same x is a 16-bit rotate,
			// valid only where the target can rotate 16-bit values.
			if !(c < 16 && d == 16-c && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add16 left:(Lsh16x64 x y) right:(Rsh16Ux64 x (Sub64 (Const64 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh16x64 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh16Ux64 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub64 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add16 left:(Lsh16x32 x y) right:(Rsh16Ux32 x (Sub32 (Const32 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh16x32 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh16Ux32 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub32 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add16 left:(Lsh16x16 x y) right:(Rsh16Ux16 x (Sub16 (Const16 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh16x16 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh16Ux16 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub16 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add16 left:(Lsh16x8 x y) right:(Rsh16Ux8 x (Sub8 (Const8 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh16x8 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh16Ux8 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub8 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add16 right:(Rsh16Ux64 x y) left:(Lsh16x64 x z:(Sub64 (Const64 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh16Ux64 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh16x64 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub64 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add16 right:(Rsh16Ux32 x y) left:(Lsh16x32 x z:(Sub32 (Const32 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh16Ux32 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh16x32 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub32 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add16 right:(Rsh16Ux16 x y) left:(Lsh16x16 x z:(Sub16 (Const16 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh16Ux16 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh16x16 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub16 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add16 right:(Rsh16Ux8 x y) left:(Lsh16x8 x z:(Sub8 (Const8 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh16Ux8 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh16x8 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub8 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// No rule matched; leave v unchanged.
	return false
}
// rewriteValuegeneric_OpAdd32 applies the generic rewrite rules for OpAdd32.
// Each "for { ... }" loop below attempts exactly one rule, documented by the
// preceding match/cond/result comments; on success v is rewritten in place
// and the function returns true. Because Add32 is commutative, the inner
// _i0 (and deeper _iN) loops retry each pattern with the operands swapped.
func rewriteValuegeneric_OpAdd32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// config is consulted by canRotate to decide whether the target
	// supports rotate instructions of the given width.
	config := b.Func.Config
	// match: (Add32 (Const32 [c]) (Const32 [d]))
	// result: (Const32 [c+d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpConst32 {
				continue
			}
			d := auxIntToInt32(v_1.AuxInt)
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(c + d)
			return true
		}
		break
	}
	// match: (Add32 <t> (Mul32 x y) (Mul32 x z))
	// result: (Mul32 x (Add32 <t> y z))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpMul32 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				if v_1.Op != OpMul32 {
					continue
				}
				_ = v_1.Args[1]
				v_1_0 := v_1.Args[0]
				v_1_1 := v_1.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
					if x != v_1_0 {
						continue
					}
					z := v_1_1
					v.reset(OpMul32)
					v0 := b.NewValue0(v.Pos, OpAdd32, t)
					v0.AddArg2(y, z)
					v.AddArg2(x, v0)
					return true
				}
			}
		}
		break
	}
	// match: (Add32 (Const32 [0]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Add32 x (Neg32 y))
	// result: (Sub32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpNeg32 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpSub32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add32 (Com32 x) x)
	// result: (Const32 [-1])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom32 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(-1)
			return true
		}
		break
	}
	// match: (Add32 (Sub32 x t) (Add32 t y))
	// result: (Add32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub32 {
				continue
			}
			t := v_0.Args[1]
			x := v_0.Args[0]
			if v_1.Op != OpAdd32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if t != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAdd32)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (Add32 (Const32 [1]) (Com32 x))
	// result: (Neg32 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 1 || v_1.Op != OpCom32 {
				continue
			}
			x := v_1.Args[0]
			v.reset(OpNeg32)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Add32 x (Sub32 y x))
	// result: y
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpSub32 {
				continue
			}
			_ = v_1.Args[1]
			y := v_1.Args[0]
			if x != v_1.Args[1] {
				continue
			}
			v.copyOf(y)
			return true
		}
		break
	}
	// match: (Add32 x (Add32 y (Sub32 z x)))
	// result: (Add32 y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAdd32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				y := v_1_0
				if v_1_1.Op != OpSub32 {
					continue
				}
				_ = v_1_1.Args[1]
				z := v_1_1.Args[0]
				if x != v_1_1.Args[1] {
					continue
				}
				v.reset(OpAdd32)
				v.AddArg2(y, z)
				return true
			}
		}
		break
	}
	// match: (Add32 (Add32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Add32 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAdd32 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst32 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst32 && x.Op != OpConst32) {
					continue
				}
				v.reset(OpAdd32)
				v0 := b.NewValue0(v.Pos, OpAdd32, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Add32 (Sub32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Sub32 <t> x z))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub32 {
				continue
			}
			z := v_0.Args[1]
			i := v_0.Args[0]
			if i.Op != OpConst32 {
				continue
			}
			t := i.Type
			x := v_1
			if !(z.Op != OpConst32 && x.Op != OpConst32) {
				continue
			}
			v.reset(OpAdd32)
			v0 := b.NewValue0(v.Pos, OpSub32, t)
			v0.AddArg2(x, z)
			v.AddArg2(i, v0)
			return true
		}
		break
	}
	// match: (Add32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
	// result: (Add32 (Const32 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpAdd32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt32(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAdd32)
				v0 := b.NewValue0(v.Pos, OpConst32, t)
				v0.AuxInt = int32ToAuxInt(c + d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Add32 (Const32 <t> [c]) (Sub32 (Const32 <t> [d]) x))
	// result: (Sub32 (Const32 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpSub32 {
				continue
			}
			x := v_1.Args[1]
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpConst32 || v_1_0.Type != t {
				continue
			}
			d := auxIntToInt32(v_1_0.AuxInt)
			v.reset(OpSub32)
			v0 := b.NewValue0(v.Pos, OpConst32, t)
			v0.AuxInt = int32ToAuxInt(c + d)
			v.AddArg2(v0, x)
			return true
		}
		break
	}
	// The remaining rules recognize shift pairs that form a 32-bit rotate.
	// match: (Add32 (Lsh32x64 x z:(Const64 <t> [c])) (Rsh32Ux64 x (Const64 [d])))
	// cond: c < 32 && d == 32-c && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLsh32x64 {
				continue
			}
			_ = v_0.Args[1]
			x := v_0.Args[0]
			z := v_0.Args[1]
			if z.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(z.AuxInt)
			if v_1.Op != OpRsh32Ux64 {
				continue
			}
			_ = v_1.Args[1]
			if x != v_1.Args[0] {
				continue
			}
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1_1.AuxInt)
			if !(c < 32 && d == 32-c && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add32 left:(Lsh32x64 x y) right:(Rsh32Ux64 x (Sub64 (Const64 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh32x64 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh32Ux64 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub64 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add32 left:(Lsh32x32 x y) right:(Rsh32Ux32 x (Sub32 (Const32 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh32x32 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh32Ux32 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub32 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add32 left:(Lsh32x16 x y) right:(Rsh32Ux16 x (Sub16 (Const16 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh32x16 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh32Ux16 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub16 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add32 left:(Lsh32x8 x y) right:(Rsh32Ux8 x (Sub8 (Const8 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh32x8 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh32Ux8 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub8 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add32 right:(Rsh32Ux64 x y) left:(Lsh32x64 x z:(Sub64 (Const64 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh32Ux64 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh32x64 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub64 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add32 right:(Rsh32Ux32 x y) left:(Lsh32x32 x z:(Sub32 (Const32 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh32Ux32 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh32x32 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub32 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add32 right:(Rsh32Ux16 x y) left:(Lsh32x16 x z:(Sub16 (Const16 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh32Ux16 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh32x16 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub16 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add32 right:(Rsh32Ux8 x y) left:(Lsh32x8 x z:(Sub8 (Const8 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh32Ux8 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh32x8 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub8 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpAdd32F applies the generic rewrite rules for OpAdd32F.
// It returns true if v was rewritten.
func rewriteValuegeneric_OpAdd32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Add32F (Const32F [c]) (Const32F [d]))
	// cond: c+d == c+d
	// result: (Const32F [c+d])
	for {
		// The _i0 loop tries both operand orders since Add32F is commutative.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32F {
				continue
			}
			c := auxIntToFloat32(v_0.AuxInt)
			if v_1.Op != OpConst32F {
				continue
			}
			d := auxIntToFloat32(v_1.AuxInt)
			// c+d == c+d is false only when the sum is NaN (NaN != NaN),
			// so NaN results are deliberately not constant-folded.
			if !(c+d == c+d) {
				continue
			}
			v.reset(OpConst32F)
			v.AuxInt = float32ToAuxInt(c + d)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpAdd64 applies the generic rewrite rules for OpAdd64.
// Each "for { ... }" loop below attempts exactly one rule, documented by the
// preceding match/cond/result comments; on success v is rewritten in place
// and the function returns true. Because Add64 is commutative, the inner
// _i0 (and deeper _iN) loops retry each pattern with the operands swapped.
func rewriteValuegeneric_OpAdd64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// config is consulted by canRotate to decide whether the target
	// supports rotate instructions of the given width.
	config := b.Func.Config
	// match: (Add64 (Const64 [c]) (Const64 [d]))
	// result: (Const64 [c+d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(c + d)
			return true
		}
		break
	}
	// match: (Add64 <t> (Mul64 x y) (Mul64 x z))
	// result: (Mul64 x (Add64 <t> y z))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpMul64 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				if v_1.Op != OpMul64 {
					continue
				}
				_ = v_1.Args[1]
				v_1_0 := v_1.Args[0]
				v_1_1 := v_1.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
					if x != v_1_0 {
						continue
					}
					z := v_1_1
					v.reset(OpMul64)
					v0 := b.NewValue0(v.Pos, OpAdd64, t)
					v0.AddArg2(y, z)
					v.AddArg2(x, v0)
					return true
				}
			}
		}
		break
	}
	// match: (Add64 (Const64 [0]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Add64 x (Neg64 y))
	// result: (Sub64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpNeg64 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpSub64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add64 (Com64 x) x)
	// result: (Const64 [-1])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom64 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(-1)
			return true
		}
		break
	}
	// match: (Add64 (Sub64 x t) (Add64 t y))
	// result: (Add64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub64 {
				continue
			}
			t := v_0.Args[1]
			x := v_0.Args[0]
			if v_1.Op != OpAdd64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if t != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAdd64)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (Add64 (Const64 [1]) (Com64 x))
	// result: (Neg64 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 1 || v_1.Op != OpCom64 {
				continue
			}
			x := v_1.Args[0]
			v.reset(OpNeg64)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Add64 x (Sub64 y x))
	// result: y
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpSub64 {
				continue
			}
			_ = v_1.Args[1]
			y := v_1.Args[0]
			if x != v_1.Args[1] {
				continue
			}
			v.copyOf(y)
			return true
		}
		break
	}
	// match: (Add64 x (Add64 y (Sub64 z x)))
	// result: (Add64 y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAdd64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				y := v_1_0
				if v_1_1.Op != OpSub64 {
					continue
				}
				_ = v_1_1.Args[1]
				z := v_1_1.Args[0]
				if x != v_1_1.Args[1] {
					continue
				}
				v.reset(OpAdd64)
				v.AddArg2(y, z)
				return true
			}
		}
		break
	}
	// match: (Add64 (Add64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Add64 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAdd64 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst64 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst64 && x.Op != OpConst64) {
					continue
				}
				v.reset(OpAdd64)
				v0 := b.NewValue0(v.Pos, OpAdd64, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Add64 (Sub64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Sub64 <t> x z))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub64 {
				continue
			}
			z := v_0.Args[1]
			i := v_0.Args[0]
			if i.Op != OpConst64 {
				continue
			}
			t := i.Type
			x := v_1
			if !(z.Op != OpConst64 && x.Op != OpConst64) {
				continue
			}
			v.reset(OpAdd64)
			v0 := b.NewValue0(v.Pos, OpSub64, t)
			v0.AddArg2(x, z)
			v.AddArg2(i, v0)
			return true
		}
		break
	}
	// match: (Add64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
	// result: (Add64 (Const64 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpAdd64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt64(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAdd64)
				v0 := b.NewValue0(v.Pos, OpConst64, t)
				v0.AuxInt = int64ToAuxInt(c + d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Add64 (Const64 <t> [c]) (Sub64 (Const64 <t> [d]) x))
	// result: (Sub64 (Const64 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpSub64 {
				continue
			}
			x := v_1.Args[1]
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpConst64 || v_1_0.Type != t {
				continue
			}
			d := auxIntToInt64(v_1_0.AuxInt)
			v.reset(OpSub64)
			v0 := b.NewValue0(v.Pos, OpConst64, t)
			v0.AuxInt = int64ToAuxInt(c + d)
			v.AddArg2(v0, x)
			return true
		}
		break
	}
	// The remaining rules recognize shift pairs that form a 64-bit rotate.
	// match: (Add64 (Lsh64x64 x z:(Const64 <t> [c])) (Rsh64Ux64 x (Const64 [d])))
	// cond: c < 64 && d == 64-c && canRotate(config, 64)
	// result: (RotateLeft64 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLsh64x64 {
				continue
			}
			_ = v_0.Args[1]
			x := v_0.Args[0]
			z := v_0.Args[1]
			if z.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(z.AuxInt)
			if v_1.Op != OpRsh64Ux64 {
				continue
			}
			_ = v_1.Args[1]
			if x != v_1.Args[0] {
				continue
			}
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1_1.AuxInt)
			if !(c < 64 && d == 64-c && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add64 left:(Lsh64x64 x y) right:(Rsh64Ux64 x (Sub64 (Const64 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh64x64 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh64Ux64 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub64 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add64 left:(Lsh64x32 x y) right:(Rsh64Ux32 x (Sub32 (Const32 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh64x32 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh64Ux32 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub32 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add64 left:(Lsh64x16 x y) right:(Rsh64Ux16 x (Sub16 (Const16 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh64x16 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh64Ux16 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub16 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add64 left:(Lsh64x8 x y) right:(Rsh64Ux8 x (Sub8 (Const8 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh64x8 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh64Ux8 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub8 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add64 right:(Rsh64Ux64 x y) left:(Lsh64x64 x z:(Sub64 (Const64 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh64Ux64 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh64x64 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub64 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add64 right:(Rsh64Ux32 x y) left:(Lsh64x32 x z:(Sub32 (Const32 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh64Ux32 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh64x32 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub32 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add64 right:(Rsh64Ux16 x y) left:(Lsh64x16 x z:(Sub16 (Const16 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh64Ux16 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh64x16 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub16 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add64 right:(Rsh64Ux8 x y) left:(Lsh64x8 x z:(Sub8 (Const8 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh64Ux8 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh64x8 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub8 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpAdd64F applies the generic rewrite rules for Add64F
// to v, reporting whether v was rewritten.
func rewriteValuegeneric_OpAdd64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Add64F (Const64F [c]) (Const64F [d]))
	// cond: c+d == c+d
	// result: (Const64F [c+d])
	for {
		// Add64F is commutative: the inner loop tries both argument orders
		// by swapping v_0 and v_1 on the second iteration.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64F {
				continue
			}
			c := auxIntToFloat64(v_0.AuxInt)
			if v_1.Op != OpConst64F {
				continue
			}
			d := auxIntToFloat64(v_1.AuxInt)
			// c+d == c+d is false exactly when c+d is NaN: do not
			// constant-fold NaN-producing additions, so the NaN is
			// produced at run time with its expected bit pattern.
			if !(c+d == c+d) {
				continue
			}
			v.reset(OpConst64F)
			v.AuxInt = float64ToAuxInt(c + d)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpAdd8 applies the generic rewrite rules for Add8
// to v, reporting whether v was rewritten. The rules cover constant
// folding, algebraic simplification, constant reassociation, and
// recognition of shift pairs that form an 8-bit rotate.
func rewriteValuegeneric_OpAdd8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Add8 (Const8 [c]) (Const8 [d]))
	// result: (Const8 [c+d])
	for {
		// Add8 is commutative: each _i0 loop tries both argument orders
		// by swapping v_0 and v_1 on the second iteration.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpConst8 {
				continue
			}
			d := auxIntToInt8(v_1.AuxInt)
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(c + d)
			return true
		}
		break
	}
	// match: (Add8 <t> (Mul8 x y) (Mul8 x z))
	// result: (Mul8 x (Add8 <t> y z))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpMul8 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				if v_1.Op != OpMul8 {
					continue
				}
				_ = v_1.Args[1]
				v_1_0 := v_1.Args[0]
				v_1_1 := v_1.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
					if x != v_1_0 {
						continue
					}
					z := v_1_1
					v.reset(OpMul8)
					v0 := b.NewValue0(v.Pos, OpAdd8, t)
					v0.AddArg2(y, z)
					v.AddArg2(x, v0)
					return true
				}
			}
		}
		break
	}
	// match: (Add8 (Const8 [0]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Add8 x (Neg8 y))
	// result: (Sub8 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpNeg8 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpSub8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add8 (Com8 x) x)
	// result: (Const8 [-1])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom8 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(-1)
			return true
		}
		break
	}
	// match: (Add8 (Sub8 x t) (Add8 t y))
	// result: (Add8 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub8 {
				continue
			}
			t := v_0.Args[1]
			x := v_0.Args[0]
			if v_1.Op != OpAdd8 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if t != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAdd8)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (Add8 (Const8 [1]) (Com8 x))
	// result: (Neg8 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 1 || v_1.Op != OpCom8 {
				continue
			}
			x := v_1.Args[0]
			v.reset(OpNeg8)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Add8 x (Sub8 y x))
	// result: y
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpSub8 {
				continue
			}
			_ = v_1.Args[1]
			y := v_1.Args[0]
			if x != v_1.Args[1] {
				continue
			}
			v.copyOf(y)
			return true
		}
		break
	}
	// match: (Add8 x (Add8 y (Sub8 z x)))
	// result: (Add8 y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAdd8 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				y := v_1_0
				if v_1_1.Op != OpSub8 {
					continue
				}
				_ = v_1_1.Args[1]
				z := v_1_1.Args[0]
				if x != v_1_1.Args[1] {
					continue
				}
				v.reset(OpAdd8)
				v.AddArg2(y, z)
				return true
			}
		}
		break
	}
	// Reassociation: float constants toward the top of the expression
	// tree so later rules can fold them. The conds stop the rewrite from
	// firing when it would just ping-pong between constant operands.
	// match: (Add8 (Add8 i:(Const8 <t>) z) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Add8 i (Add8 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAdd8 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst8 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst8 && x.Op != OpConst8) {
					continue
				}
				v.reset(OpAdd8)
				v0 := b.NewValue0(v.Pos, OpAdd8, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Add8 (Sub8 i:(Const8 <t>) z) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Add8 i (Sub8 <t> x z))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub8 {
				continue
			}
			z := v_0.Args[1]
			i := v_0.Args[0]
			if i.Op != OpConst8 {
				continue
			}
			t := i.Type
			x := v_1
			if !(z.Op != OpConst8 && x.Op != OpConst8) {
				continue
			}
			v.reset(OpAdd8)
			v0 := b.NewValue0(v.Pos, OpSub8, t)
			v0.AddArg2(x, z)
			v.AddArg2(i, v0)
			return true
		}
		break
	}
	// match: (Add8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
	// result: (Add8 (Const8 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpAdd8 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt8(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAdd8)
				v0 := b.NewValue0(v.Pos, OpConst8, t)
				v0.AuxInt = int8ToAuxInt(c + d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Add8 (Const8 <t> [c]) (Sub8 (Const8 <t> [d]) x))
	// result: (Sub8 (Const8 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpSub8 {
				continue
			}
			x := v_1.Args[1]
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpConst8 || v_1_0.Type != t {
				continue
			}
			d := auxIntToInt8(v_1_0.AuxInt)
			v.reset(OpSub8)
			v0 := b.NewValue0(v.Pos, OpConst8, t)
			v0.AuxInt = int8ToAuxInt(c + d)
			v.AddArg2(v0, x)
			return true
		}
		break
	}
	// Rotate recognition: (x << c) + (x >>u (8-c)) and the bounded-shift
	// variants below all become RotateLeft8 when the target supports it
	// (canRotate). The rules come in left-shift-first and
	// right-shift-first forms, for each shift-amount width (64/32/16/8).
	// match: (Add8 (Lsh8x64 x z:(Const64 <t> [c])) (Rsh8Ux64 x (Const64 [d])))
	// cond: c < 8 && d == 8-c && canRotate(config, 8)
	// result: (RotateLeft8 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLsh8x64 {
				continue
			}
			_ = v_0.Args[1]
			x := v_0.Args[0]
			z := v_0.Args[1]
			if z.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(z.AuxInt)
			if v_1.Op != OpRsh8Ux64 {
				continue
			}
			_ = v_1.Args[1]
			if x != v_1.Args[0] {
				continue
			}
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1_1.AuxInt)
			if !(c < 8 && d == 8-c && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add8 left:(Lsh8x64 x y) right:(Rsh8Ux64 x (Sub64 (Const64 [8]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
	// result: (RotateLeft8 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh8x64 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh8Ux64 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub64 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add8 left:(Lsh8x32 x y) right:(Rsh8Ux32 x (Sub32 (Const32 [8]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
	// result: (RotateLeft8 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh8x32 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh8Ux32 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub32 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add8 left:(Lsh8x16 x y) right:(Rsh8Ux16 x (Sub16 (Const16 [8]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
	// result: (RotateLeft8 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh8x16 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh8Ux16 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub16 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add8 left:(Lsh8x8 x y) right:(Rsh8Ux8 x (Sub8 (Const8 [8]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
	// result: (RotateLeft8 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh8x8 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh8Ux8 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub8 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add8 right:(Rsh8Ux64 x y) left:(Lsh8x64 x z:(Sub64 (Const64 [8]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
	// result: (RotateLeft8 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh8Ux64 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh8x64 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub64 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add8 right:(Rsh8Ux32 x y) left:(Lsh8x32 x z:(Sub32 (Const32 [8]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
	// result: (RotateLeft8 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh8Ux32 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh8x32 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub32 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add8 right:(Rsh8Ux16 x y) left:(Lsh8x16 x z:(Sub16 (Const16 [8]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
	// result: (RotateLeft8 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh8Ux16 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh8x16 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub16 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add8 right:(Rsh8Ux8 x y) left:(Lsh8x8 x z:(Sub8 (Const8 [8]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
	// result: (RotateLeft8 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh8Ux8 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh8x8 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub8 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpAddPtr applies the generic rewrite rules for AddPtr
// to v, reporting whether v was rewritten. Pointer+constant becomes OffPtr,
// which later passes can fold into addressing modes.
// Note: AddPtr is not treated as commutative here; the constant is matched
// only in the second argument position.
func rewriteValuegeneric_OpAddPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AddPtr <t> x (Const64 [c]))
	// result: (OffPtr <t> x [c])
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOffPtr)
		v.Type = t
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (AddPtr <t> x (Const32 [c]))
	// result: (OffPtr <t> x [int64(c)])
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		// OffPtr offsets are always stored as int64; sign-extend the
		// 32-bit constant.
		c := auxIntToInt32(v_1.AuxInt)
		v.reset(OpOffPtr)
		v.Type = t
		v.AuxInt = int64ToAuxInt(int64(c))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAnd16 applies the generic rewrite rules for And16
// to v, reporting whether v was rewritten. The rules cover constant
// folding, De Morgan simplification, masked-shift elimination, identity
// and annihilator operands, and constant reassociation.
func rewriteValuegeneric_OpAnd16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (And16 (Const16 [c]) (Const16 [d]))
	// result: (Const16 [c&d])
	for {
		// And16 is commutative: each _i0 loop tries both argument orders
		// by swapping v_0 and v_1 on the second iteration.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpConst16 {
				continue
			}
			d := auxIntToInt16(v_1.AuxInt)
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(c & d)
			return true
		}
		break
	}
	// De Morgan: ^x & ^y == ^(x | y).
	// match: (And16 <t> (Com16 x) (Com16 y))
	// result: (Com16 (Or16 <t> x y))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom16 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpCom16 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpCom16)
			v0 := b.NewValue0(v.Pos, OpOr16, t)
			v0.AddArg2(x, y)
			v.AddArg(v0)
			return true
		}
		break
	}
	// A right shift that moves all of the mask's set bits out of range
	// yields zero (ntz16 = number of trailing zeros of the mask).
	// match: (And16 (Const16 [m]) (Rsh16Ux64 _ (Const64 [c])))
	// cond: c >= int64(16-ntz16(m))
	// result: (Const16 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			m := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpRsh16Ux64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(16-ntz16(m))) {
				continue
			}
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(0)
			return true
		}
		break
	}
	// Same idea for left shifts (nlz16 = number of leading zeros).
	// match: (And16 (Const16 [m]) (Lsh16x64 _ (Const64 [c])))
	// cond: c >= int64(16-nlz16(m))
	// result: (Const16 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			m := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpLsh16x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(16-nlz16(m))) {
				continue
			}
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And16 x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (And16 (Const16 [-1]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (And16 (Const16 [0]) _)
	// result: (Const16 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And16 (Com16 x) x)
	// result: (Const16 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom16 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And16 x (And16 x y))
	// result: (And16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAnd16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAnd16)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// Reassociate so constants float to the top; the cond prevents the
	// rule from ping-ponging when both remaining operands are constants.
	// match: (And16 (And16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (And16 i (And16 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd16 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst16 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst16 && x.Op != OpConst16) {
					continue
				}
				v.reset(OpAnd16)
				v0 := b.NewValue0(v.Pos, OpAnd16, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (And16 (Const16 <t> [c]) (And16 (Const16 <t> [d]) x))
	// result: (And16 (Const16 <t> [c&d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpAnd16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt16(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAnd16)
				v0 := b.NewValue0(v.Pos, OpConst16, t)
				v0.AuxInt = int16ToAuxInt(c & d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpAnd32 applies the generic rewrite rules for And32
// to v, reporting whether v was rewritten. The rule set mirrors And16:
// constant folding, De Morgan simplification, masked-shift elimination,
// identity and annihilator operands, and constant reassociation.
func rewriteValuegeneric_OpAnd32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (And32 (Const32 [c]) (Const32 [d]))
	// result: (Const32 [c&d])
	for {
		// And32 is commutative: each _i0 loop tries both argument orders
		// by swapping v_0 and v_1 on the second iteration.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpConst32 {
				continue
			}
			d := auxIntToInt32(v_1.AuxInt)
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(c & d)
			return true
		}
		break
	}
	// De Morgan: ^x & ^y == ^(x | y).
	// match: (And32 <t> (Com32 x) (Com32 y))
	// result: (Com32 (Or32 <t> x y))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom32 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpCom32 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpCom32)
			v0 := b.NewValue0(v.Pos, OpOr32, t)
			v0.AddArg2(x, y)
			v.AddArg(v0)
			return true
		}
		break
	}
	// A right shift that moves all of the mask's set bits out of range
	// yields zero (ntz32 = number of trailing zeros of the mask).
	// match: (And32 (Const32 [m]) (Rsh32Ux64 _ (Const64 [c])))
	// cond: c >= int64(32-ntz32(m))
	// result: (Const32 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			m := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpRsh32Ux64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(32-ntz32(m))) {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(0)
			return true
		}
		break
	}
	// Same idea for left shifts (nlz32 = number of leading zeros).
	// match: (And32 (Const32 [m]) (Lsh32x64 _ (Const64 [c])))
	// cond: c >= int64(32-nlz32(m))
	// result: (Const32 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			m := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpLsh32x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(32-nlz32(m))) {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And32 x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (And32 (Const32 [-1]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (And32 (Const32 [0]) _)
	// result: (Const32 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And32 (Com32 x) x)
	// result: (Const32 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom32 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And32 x (And32 x y))
	// result: (And32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAnd32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAnd32)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// Reassociate so constants float to the top; the cond prevents the
	// rule from ping-ponging when both remaining operands are constants.
	// match: (And32 (And32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (And32 i (And32 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd32 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst32 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst32 && x.Op != OpConst32) {
					continue
				}
				v.reset(OpAnd32)
				v0 := b.NewValue0(v.Pos, OpAnd32, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (And32 (Const32 <t> [c]) (And32 (Const32 <t> [d]) x))
	// result: (And32 (Const32 <t> [c&d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpAnd32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt32(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAnd32)
				v0 := b.NewValue0(v.Pos, OpConst32, t)
				v0.AuxInt = int32ToAuxInt(c & d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpAnd64 applies the generic rewrite rules for And64.
// The rules are tried in order; the first one whose pattern and condition
// match rewrites v in place and returns true. If no rule applies, v is left
// unchanged and false is returned. The "match:"/"cond:"/"result:" comments
// above each attempt are copied from _gen/generic.rules. Loops of the form
// `for _i0 := 0; _i0 <= 1; ...` try both argument orders, because And64 is
// commutative.
func rewriteValuegeneric_OpAnd64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (And64 (Const64 [c]) (Const64 [d]))
	// result: (Const64 [c&d])
	for {
		// Swap v_0/v_1 on the second pass to match either operand order.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(c & d)
			return true
		}
		break
	}
	// match: (And64 <t> (Com64 x) (Com64 y))
	// result: (Com64 (Or64 <t> x y))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom64 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpCom64 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpCom64)
			v0 := b.NewValue0(v.Pos, OpOr64, t)
			v0.AddArg2(x, y)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (And64 (Const64 [m]) (Rsh64Ux64 _ (Const64 [c])))
	// cond: c >= int64(64-ntz64(m))
	// result: (Const64 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			m := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpRsh64Ux64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			// The shift moves every bit of the mask m out of range, so
			// the And64 is statically zero.
			if !(c >= int64(64-ntz64(m))) {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And64 (Const64 [m]) (Lsh64x64 _ (Const64 [c])))
	// cond: c >= int64(64-nlz64(m))
	// result: (Const64 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			m := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpLsh64x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(64-nlz64(m))) {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And64 x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (And64 (Const64 [-1]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (And64 (Const64 [0]) _)
	// result: (Const64 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And64 (Com64 x) x)
	// result: (Const64 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom64 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And64 x (And64 x y))
	// result: (And64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAnd64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			// Inner And64 is commutative too; try both of its orders.
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAnd64)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (And64 (And64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (And64 i (And64 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd64 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst64 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				// Only reassociate to float the constant outward; the
				// non-const guard avoids rewriting endlessly.
				if !(z.Op != OpConst64 && x.Op != OpConst64) {
					continue
				}
				v.reset(OpAnd64)
				v0 := b.NewValue0(v.Pos, OpAnd64, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (And64 (Const64 <t> [c]) (And64 (Const64 <t> [d]) x))
	// result: (And64 (Const64 <t> [c&d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpAnd64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt64(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAnd64)
				v0 := b.NewValue0(v.Pos, OpConst64, t)
				v0.AuxInt = int64ToAuxInt(c & d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpAnd8 applies the generic rewrite rules for And8.
// The rules are tried in order; the first one whose pattern and condition
// match rewrites v in place and returns true. If no rule applies, v is left
// unchanged and false is returned. The "match:"/"cond:"/"result:" comments
// above each attempt are copied from _gen/generic.rules. Loops of the form
// `for _i0 := 0; _i0 <= 1; ...` try both argument orders, because And8 is
// commutative.
func rewriteValuegeneric_OpAnd8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (And8 (Const8 [c]) (Const8 [d]))
	// result: (Const8 [c&d])
	for {
		// Swap v_0/v_1 on the second pass to match either operand order.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpConst8 {
				continue
			}
			d := auxIntToInt8(v_1.AuxInt)
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(c & d)
			return true
		}
		break
	}
	// match: (And8 <t> (Com8 x) (Com8 y))
	// result: (Com8 (Or8 <t> x y))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom8 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpCom8 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpCom8)
			v0 := b.NewValue0(v.Pos, OpOr8, t)
			v0.AddArg2(x, y)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (And8 (Const8 [m]) (Rsh8Ux64 _ (Const64 [c])))
	// cond: c >= int64(8-ntz8(m))
	// result: (Const8 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			m := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpRsh8Ux64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			// The shift moves every bit of the mask m out of range, so
			// the And8 is statically zero.
			if !(c >= int64(8-ntz8(m))) {
				continue
			}
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And8 (Const8 [m]) (Lsh8x64 _ (Const64 [c])))
	// cond: c >= int64(8-nlz8(m))
	// result: (Const8 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			m := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpLsh8x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(8-nlz8(m))) {
				continue
			}
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And8 x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (And8 (Const8 [-1]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (And8 (Const8 [0]) _)
	// result: (Const8 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And8 (Com8 x) x)
	// result: (Const8 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom8 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And8 x (And8 x y))
	// result: (And8 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAnd8 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			// Inner And8 is commutative too; try both of its orders.
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAnd8)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (And8 (And8 i:(Const8 <t>) z) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (And8 i (And8 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd8 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst8 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				// Only reassociate to float the constant outward; the
				// non-const guard avoids rewriting endlessly.
				if !(z.Op != OpConst8 && x.Op != OpConst8) {
					continue
				}
				v.reset(OpAnd8)
				v0 := b.NewValue0(v.Pos, OpAnd8, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (And8 (Const8 <t> [c]) (And8 (Const8 <t> [d]) x))
	// result: (And8 (Const8 <t> [c&d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpAnd8 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt8(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAnd8)
				v0 := b.NewValue0(v.Pos, OpConst8, t)
				v0.AuxInt = int8ToAuxInt(c & d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	return false
}
3948func rewriteValuegeneric_OpAndB(v *Value) bool {
3949	v_1 := v.Args[1]
3950	v_0 := v.Args[0]
3951	b := v.Block
3952	// match: (AndB (Leq64 (Const64 [c]) x) (Less64 x (Const64 [d])))
3953	// cond: d >= c
3954	// result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
3955	for {
3956		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3957			if v_0.Op != OpLeq64 {
3958				continue
3959			}
3960			x := v_0.Args[1]
3961			v_0_0 := v_0.Args[0]
3962			if v_0_0.Op != OpConst64 {
3963				continue
3964			}
3965			c := auxIntToInt64(v_0_0.AuxInt)
3966			if v_1.Op != OpLess64 {
3967				continue
3968			}
3969			_ = v_1.Args[1]
3970			if x != v_1.Args[0] {
3971				continue
3972			}
3973			v_1_1 := v_1.Args[1]
3974			if v_1_1.Op != OpConst64 {
3975				continue
3976			}
3977			d := auxIntToInt64(v_1_1.AuxInt)
3978			if !(d >= c) {
3979				continue
3980			}
3981			v.reset(OpLess64U)
3982			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
3983			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
3984			v1.AuxInt = int64ToAuxInt(c)
3985			v0.AddArg2(x, v1)
3986			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
3987			v2.AuxInt = int64ToAuxInt(d - c)
3988			v.AddArg2(v0, v2)
3989			return true
3990		}
3991		break
3992	}
3993	// match: (AndB (Leq64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
3994	// cond: d >= c
3995	// result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
3996	for {
3997		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3998			if v_0.Op != OpLeq64 {
3999				continue
4000			}
4001			x := v_0.Args[1]
4002			v_0_0 := v_0.Args[0]
4003			if v_0_0.Op != OpConst64 {
4004				continue
4005			}
4006			c := auxIntToInt64(v_0_0.AuxInt)
4007			if v_1.Op != OpLeq64 {
4008				continue
4009			}
4010			_ = v_1.Args[1]
4011			if x != v_1.Args[0] {
4012				continue
4013			}
4014			v_1_1 := v_1.Args[1]
4015			if v_1_1.Op != OpConst64 {
4016				continue
4017			}
4018			d := auxIntToInt64(v_1_1.AuxInt)
4019			if !(d >= c) {
4020				continue
4021			}
4022			v.reset(OpLeq64U)
4023			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
4024			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
4025			v1.AuxInt = int64ToAuxInt(c)
4026			v0.AddArg2(x, v1)
4027			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
4028			v2.AuxInt = int64ToAuxInt(d - c)
4029			v.AddArg2(v0, v2)
4030			return true
4031		}
4032		break
4033	}
4034	// match: (AndB (Leq32 (Const32 [c]) x) (Less32 x (Const32 [d])))
4035	// cond: d >= c
4036	// result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
4037	for {
4038		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4039			if v_0.Op != OpLeq32 {
4040				continue
4041			}
4042			x := v_0.Args[1]
4043			v_0_0 := v_0.Args[0]
4044			if v_0_0.Op != OpConst32 {
4045				continue
4046			}
4047			c := auxIntToInt32(v_0_0.AuxInt)
4048			if v_1.Op != OpLess32 {
4049				continue
4050			}
4051			_ = v_1.Args[1]
4052			if x != v_1.Args[0] {
4053				continue
4054			}
4055			v_1_1 := v_1.Args[1]
4056			if v_1_1.Op != OpConst32 {
4057				continue
4058			}
4059			d := auxIntToInt32(v_1_1.AuxInt)
4060			if !(d >= c) {
4061				continue
4062			}
4063			v.reset(OpLess32U)
4064			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
4065			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
4066			v1.AuxInt = int32ToAuxInt(c)
4067			v0.AddArg2(x, v1)
4068			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
4069			v2.AuxInt = int32ToAuxInt(d - c)
4070			v.AddArg2(v0, v2)
4071			return true
4072		}
4073		break
4074	}
4075	// match: (AndB (Leq32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
4076	// cond: d >= c
4077	// result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
4078	for {
4079		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4080			if v_0.Op != OpLeq32 {
4081				continue
4082			}
4083			x := v_0.Args[1]
4084			v_0_0 := v_0.Args[0]
4085			if v_0_0.Op != OpConst32 {
4086				continue
4087			}
4088			c := auxIntToInt32(v_0_0.AuxInt)
4089			if v_1.Op != OpLeq32 {
4090				continue
4091			}
4092			_ = v_1.Args[1]
4093			if x != v_1.Args[0] {
4094				continue
4095			}
4096			v_1_1 := v_1.Args[1]
4097			if v_1_1.Op != OpConst32 {
4098				continue
4099			}
4100			d := auxIntToInt32(v_1_1.AuxInt)
4101			if !(d >= c) {
4102				continue
4103			}
4104			v.reset(OpLeq32U)
4105			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
4106			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
4107			v1.AuxInt = int32ToAuxInt(c)
4108			v0.AddArg2(x, v1)
4109			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
4110			v2.AuxInt = int32ToAuxInt(d - c)
4111			v.AddArg2(v0, v2)
4112			return true
4113		}
4114		break
4115	}
4116	// match: (AndB (Leq16 (Const16 [c]) x) (Less16 x (Const16 [d])))
4117	// cond: d >= c
4118	// result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
4119	for {
4120		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4121			if v_0.Op != OpLeq16 {
4122				continue
4123			}
4124			x := v_0.Args[1]
4125			v_0_0 := v_0.Args[0]
4126			if v_0_0.Op != OpConst16 {
4127				continue
4128			}
4129			c := auxIntToInt16(v_0_0.AuxInt)
4130			if v_1.Op != OpLess16 {
4131				continue
4132			}
4133			_ = v_1.Args[1]
4134			if x != v_1.Args[0] {
4135				continue
4136			}
4137			v_1_1 := v_1.Args[1]
4138			if v_1_1.Op != OpConst16 {
4139				continue
4140			}
4141			d := auxIntToInt16(v_1_1.AuxInt)
4142			if !(d >= c) {
4143				continue
4144			}
4145			v.reset(OpLess16U)
4146			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
4147			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
4148			v1.AuxInt = int16ToAuxInt(c)
4149			v0.AddArg2(x, v1)
4150			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
4151			v2.AuxInt = int16ToAuxInt(d - c)
4152			v.AddArg2(v0, v2)
4153			return true
4154		}
4155		break
4156	}
4157	// match: (AndB (Leq16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
4158	// cond: d >= c
4159	// result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
4160	for {
4161		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4162			if v_0.Op != OpLeq16 {
4163				continue
4164			}
4165			x := v_0.Args[1]
4166			v_0_0 := v_0.Args[0]
4167			if v_0_0.Op != OpConst16 {
4168				continue
4169			}
4170			c := auxIntToInt16(v_0_0.AuxInt)
4171			if v_1.Op != OpLeq16 {
4172				continue
4173			}
4174			_ = v_1.Args[1]
4175			if x != v_1.Args[0] {
4176				continue
4177			}
4178			v_1_1 := v_1.Args[1]
4179			if v_1_1.Op != OpConst16 {
4180				continue
4181			}
4182			d := auxIntToInt16(v_1_1.AuxInt)
4183			if !(d >= c) {
4184				continue
4185			}
4186			v.reset(OpLeq16U)
4187			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
4188			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
4189			v1.AuxInt = int16ToAuxInt(c)
4190			v0.AddArg2(x, v1)
4191			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
4192			v2.AuxInt = int16ToAuxInt(d - c)
4193			v.AddArg2(v0, v2)
4194			return true
4195		}
4196		break
4197	}
4198	// match: (AndB (Leq8 (Const8 [c]) x) (Less8 x (Const8 [d])))
4199	// cond: d >= c
4200	// result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
4201	for {
4202		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4203			if v_0.Op != OpLeq8 {
4204				continue
4205			}
4206			x := v_0.Args[1]
4207			v_0_0 := v_0.Args[0]
4208			if v_0_0.Op != OpConst8 {
4209				continue
4210			}
4211			c := auxIntToInt8(v_0_0.AuxInt)
4212			if v_1.Op != OpLess8 {
4213				continue
4214			}
4215			_ = v_1.Args[1]
4216			if x != v_1.Args[0] {
4217				continue
4218			}
4219			v_1_1 := v_1.Args[1]
4220			if v_1_1.Op != OpConst8 {
4221				continue
4222			}
4223			d := auxIntToInt8(v_1_1.AuxInt)
4224			if !(d >= c) {
4225				continue
4226			}
4227			v.reset(OpLess8U)
4228			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
4229			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
4230			v1.AuxInt = int8ToAuxInt(c)
4231			v0.AddArg2(x, v1)
4232			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
4233			v2.AuxInt = int8ToAuxInt(d - c)
4234			v.AddArg2(v0, v2)
4235			return true
4236		}
4237		break
4238	}
4239	// match: (AndB (Leq8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
4240	// cond: d >= c
4241	// result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
4242	for {
4243		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4244			if v_0.Op != OpLeq8 {
4245				continue
4246			}
4247			x := v_0.Args[1]
4248			v_0_0 := v_0.Args[0]
4249			if v_0_0.Op != OpConst8 {
4250				continue
4251			}
4252			c := auxIntToInt8(v_0_0.AuxInt)
4253			if v_1.Op != OpLeq8 {
4254				continue
4255			}
4256			_ = v_1.Args[1]
4257			if x != v_1.Args[0] {
4258				continue
4259			}
4260			v_1_1 := v_1.Args[1]
4261			if v_1_1.Op != OpConst8 {
4262				continue
4263			}
4264			d := auxIntToInt8(v_1_1.AuxInt)
4265			if !(d >= c) {
4266				continue
4267			}
4268			v.reset(OpLeq8U)
4269			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
4270			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
4271			v1.AuxInt = int8ToAuxInt(c)
4272			v0.AddArg2(x, v1)
4273			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
4274			v2.AuxInt = int8ToAuxInt(d - c)
4275			v.AddArg2(v0, v2)
4276			return true
4277		}
4278		break
4279	}
4280	// match: (AndB (Less64 (Const64 [c]) x) (Less64 x (Const64 [d])))
4281	// cond: d >= c+1 && c+1 > c
4282	// result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
4283	for {
4284		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4285			if v_0.Op != OpLess64 {
4286				continue
4287			}
4288			x := v_0.Args[1]
4289			v_0_0 := v_0.Args[0]
4290			if v_0_0.Op != OpConst64 {
4291				continue
4292			}
4293			c := auxIntToInt64(v_0_0.AuxInt)
4294			if v_1.Op != OpLess64 {
4295				continue
4296			}
4297			_ = v_1.Args[1]
4298			if x != v_1.Args[0] {
4299				continue
4300			}
4301			v_1_1 := v_1.Args[1]
4302			if v_1_1.Op != OpConst64 {
4303				continue
4304			}
4305			d := auxIntToInt64(v_1_1.AuxInt)
4306			if !(d >= c+1 && c+1 > c) {
4307				continue
4308			}
4309			v.reset(OpLess64U)
4310			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
4311			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
4312			v1.AuxInt = int64ToAuxInt(c + 1)
4313			v0.AddArg2(x, v1)
4314			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
4315			v2.AuxInt = int64ToAuxInt(d - c - 1)
4316			v.AddArg2(v0, v2)
4317			return true
4318		}
4319		break
4320	}
4321	// match: (AndB (Less64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
4322	// cond: d >= c+1 && c+1 > c
4323	// result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
4324	for {
4325		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4326			if v_0.Op != OpLess64 {
4327				continue
4328			}
4329			x := v_0.Args[1]
4330			v_0_0 := v_0.Args[0]
4331			if v_0_0.Op != OpConst64 {
4332				continue
4333			}
4334			c := auxIntToInt64(v_0_0.AuxInt)
4335			if v_1.Op != OpLeq64 {
4336				continue
4337			}
4338			_ = v_1.Args[1]
4339			if x != v_1.Args[0] {
4340				continue
4341			}
4342			v_1_1 := v_1.Args[1]
4343			if v_1_1.Op != OpConst64 {
4344				continue
4345			}
4346			d := auxIntToInt64(v_1_1.AuxInt)
4347			if !(d >= c+1 && c+1 > c) {
4348				continue
4349			}
4350			v.reset(OpLeq64U)
4351			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
4352			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
4353			v1.AuxInt = int64ToAuxInt(c + 1)
4354			v0.AddArg2(x, v1)
4355			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
4356			v2.AuxInt = int64ToAuxInt(d - c - 1)
4357			v.AddArg2(v0, v2)
4358			return true
4359		}
4360		break
4361	}
4362	// match: (AndB (Less32 (Const32 [c]) x) (Less32 x (Const32 [d])))
4363	// cond: d >= c+1 && c+1 > c
4364	// result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
4365	for {
4366		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4367			if v_0.Op != OpLess32 {
4368				continue
4369			}
4370			x := v_0.Args[1]
4371			v_0_0 := v_0.Args[0]
4372			if v_0_0.Op != OpConst32 {
4373				continue
4374			}
4375			c := auxIntToInt32(v_0_0.AuxInt)
4376			if v_1.Op != OpLess32 {
4377				continue
4378			}
4379			_ = v_1.Args[1]
4380			if x != v_1.Args[0] {
4381				continue
4382			}
4383			v_1_1 := v_1.Args[1]
4384			if v_1_1.Op != OpConst32 {
4385				continue
4386			}
4387			d := auxIntToInt32(v_1_1.AuxInt)
4388			if !(d >= c+1 && c+1 > c) {
4389				continue
4390			}
4391			v.reset(OpLess32U)
4392			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
4393			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
4394			v1.AuxInt = int32ToAuxInt(c + 1)
4395			v0.AddArg2(x, v1)
4396			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
4397			v2.AuxInt = int32ToAuxInt(d - c - 1)
4398			v.AddArg2(v0, v2)
4399			return true
4400		}
4401		break
4402	}
4403	// match: (AndB (Less32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
4404	// cond: d >= c+1 && c+1 > c
4405	// result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
4406	for {
4407		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4408			if v_0.Op != OpLess32 {
4409				continue
4410			}
4411			x := v_0.Args[1]
4412			v_0_0 := v_0.Args[0]
4413			if v_0_0.Op != OpConst32 {
4414				continue
4415			}
4416			c := auxIntToInt32(v_0_0.AuxInt)
4417			if v_1.Op != OpLeq32 {
4418				continue
4419			}
4420			_ = v_1.Args[1]
4421			if x != v_1.Args[0] {
4422				continue
4423			}
4424			v_1_1 := v_1.Args[1]
4425			if v_1_1.Op != OpConst32 {
4426				continue
4427			}
4428			d := auxIntToInt32(v_1_1.AuxInt)
4429			if !(d >= c+1 && c+1 > c) {
4430				continue
4431			}
4432			v.reset(OpLeq32U)
4433			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
4434			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
4435			v1.AuxInt = int32ToAuxInt(c + 1)
4436			v0.AddArg2(x, v1)
4437			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
4438			v2.AuxInt = int32ToAuxInt(d - c - 1)
4439			v.AddArg2(v0, v2)
4440			return true
4441		}
4442		break
4443	}
4444	// match: (AndB (Less16 (Const16 [c]) x) (Less16 x (Const16 [d])))
4445	// cond: d >= c+1 && c+1 > c
4446	// result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
4447	for {
4448		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4449			if v_0.Op != OpLess16 {
4450				continue
4451			}
4452			x := v_0.Args[1]
4453			v_0_0 := v_0.Args[0]
4454			if v_0_0.Op != OpConst16 {
4455				continue
4456			}
4457			c := auxIntToInt16(v_0_0.AuxInt)
4458			if v_1.Op != OpLess16 {
4459				continue
4460			}
4461			_ = v_1.Args[1]
4462			if x != v_1.Args[0] {
4463				continue
4464			}
4465			v_1_1 := v_1.Args[1]
4466			if v_1_1.Op != OpConst16 {
4467				continue
4468			}
4469			d := auxIntToInt16(v_1_1.AuxInt)
4470			if !(d >= c+1 && c+1 > c) {
4471				continue
4472			}
4473			v.reset(OpLess16U)
4474			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
4475			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
4476			v1.AuxInt = int16ToAuxInt(c + 1)
4477			v0.AddArg2(x, v1)
4478			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
4479			v2.AuxInt = int16ToAuxInt(d - c - 1)
4480			v.AddArg2(v0, v2)
4481			return true
4482		}
4483		break
4484	}
4485	// match: (AndB (Less16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
4486	// cond: d >= c+1 && c+1 > c
4487	// result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
4488	for {
4489		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4490			if v_0.Op != OpLess16 {
4491				continue
4492			}
4493			x := v_0.Args[1]
4494			v_0_0 := v_0.Args[0]
4495			if v_0_0.Op != OpConst16 {
4496				continue
4497			}
4498			c := auxIntToInt16(v_0_0.AuxInt)
4499			if v_1.Op != OpLeq16 {
4500				continue
4501			}
4502			_ = v_1.Args[1]
4503			if x != v_1.Args[0] {
4504				continue
4505			}
4506			v_1_1 := v_1.Args[1]
4507			if v_1_1.Op != OpConst16 {
4508				continue
4509			}
4510			d := auxIntToInt16(v_1_1.AuxInt)
4511			if !(d >= c+1 && c+1 > c) {
4512				continue
4513			}
4514			v.reset(OpLeq16U)
4515			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
4516			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
4517			v1.AuxInt = int16ToAuxInt(c + 1)
4518			v0.AddArg2(x, v1)
4519			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
4520			v2.AuxInt = int16ToAuxInt(d - c - 1)
4521			v.AddArg2(v0, v2)
4522			return true
4523		}
4524		break
4525	}
4526	// match: (AndB (Less8 (Const8 [c]) x) (Less8 x (Const8 [d])))
4527	// cond: d >= c+1 && c+1 > c
4528	// result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
4529	for {
4530		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4531			if v_0.Op != OpLess8 {
4532				continue
4533			}
4534			x := v_0.Args[1]
4535			v_0_0 := v_0.Args[0]
4536			if v_0_0.Op != OpConst8 {
4537				continue
4538			}
4539			c := auxIntToInt8(v_0_0.AuxInt)
4540			if v_1.Op != OpLess8 {
4541				continue
4542			}
4543			_ = v_1.Args[1]
4544			if x != v_1.Args[0] {
4545				continue
4546			}
4547			v_1_1 := v_1.Args[1]
4548			if v_1_1.Op != OpConst8 {
4549				continue
4550			}
4551			d := auxIntToInt8(v_1_1.AuxInt)
4552			if !(d >= c+1 && c+1 > c) {
4553				continue
4554			}
4555			v.reset(OpLess8U)
4556			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
4557			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
4558			v1.AuxInt = int8ToAuxInt(c + 1)
4559			v0.AddArg2(x, v1)
4560			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
4561			v2.AuxInt = int8ToAuxInt(d - c - 1)
4562			v.AddArg2(v0, v2)
4563			return true
4564		}
4565		break
4566	}
4567	// match: (AndB (Less8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
4568	// cond: d >= c+1 && c+1 > c
4569	// result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
4570	for {
4571		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4572			if v_0.Op != OpLess8 {
4573				continue
4574			}
4575			x := v_0.Args[1]
4576			v_0_0 := v_0.Args[0]
4577			if v_0_0.Op != OpConst8 {
4578				continue
4579			}
4580			c := auxIntToInt8(v_0_0.AuxInt)
4581			if v_1.Op != OpLeq8 {
4582				continue
4583			}
4584			_ = v_1.Args[1]
4585			if x != v_1.Args[0] {
4586				continue
4587			}
4588			v_1_1 := v_1.Args[1]
4589			if v_1_1.Op != OpConst8 {
4590				continue
4591			}
4592			d := auxIntToInt8(v_1_1.AuxInt)
4593			if !(d >= c+1 && c+1 > c) {
4594				continue
4595			}
4596			v.reset(OpLeq8U)
4597			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
4598			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
4599			v1.AuxInt = int8ToAuxInt(c + 1)
4600			v0.AddArg2(x, v1)
4601			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
4602			v2.AuxInt = int8ToAuxInt(d - c - 1)
4603			v.AddArg2(v0, v2)
4604			return true
4605		}
4606		break
4607	}
4608	// match: (AndB (Leq64U (Const64 [c]) x) (Less64U x (Const64 [d])))
4609	// cond: uint64(d) >= uint64(c)
4610	// result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
4611	for {
4612		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4613			if v_0.Op != OpLeq64U {
4614				continue
4615			}
4616			x := v_0.Args[1]
4617			v_0_0 := v_0.Args[0]
4618			if v_0_0.Op != OpConst64 {
4619				continue
4620			}
4621			c := auxIntToInt64(v_0_0.AuxInt)
4622			if v_1.Op != OpLess64U {
4623				continue
4624			}
4625			_ = v_1.Args[1]
4626			if x != v_1.Args[0] {
4627				continue
4628			}
4629			v_1_1 := v_1.Args[1]
4630			if v_1_1.Op != OpConst64 {
4631				continue
4632			}
4633			d := auxIntToInt64(v_1_1.AuxInt)
4634			if !(uint64(d) >= uint64(c)) {
4635				continue
4636			}
4637			v.reset(OpLess64U)
4638			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
4639			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
4640			v1.AuxInt = int64ToAuxInt(c)
4641			v0.AddArg2(x, v1)
4642			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
4643			v2.AuxInt = int64ToAuxInt(d - c)
4644			v.AddArg2(v0, v2)
4645			return true
4646		}
4647		break
4648	}
4649	// match: (AndB (Leq64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
4650	// cond: uint64(d) >= uint64(c)
4651	// result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
4652	for {
4653		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4654			if v_0.Op != OpLeq64U {
4655				continue
4656			}
4657			x := v_0.Args[1]
4658			v_0_0 := v_0.Args[0]
4659			if v_0_0.Op != OpConst64 {
4660				continue
4661			}
4662			c := auxIntToInt64(v_0_0.AuxInt)
4663			if v_1.Op != OpLeq64U {
4664				continue
4665			}
4666			_ = v_1.Args[1]
4667			if x != v_1.Args[0] {
4668				continue
4669			}
4670			v_1_1 := v_1.Args[1]
4671			if v_1_1.Op != OpConst64 {
4672				continue
4673			}
4674			d := auxIntToInt64(v_1_1.AuxInt)
4675			if !(uint64(d) >= uint64(c)) {
4676				continue
4677			}
4678			v.reset(OpLeq64U)
4679			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
4680			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
4681			v1.AuxInt = int64ToAuxInt(c)
4682			v0.AddArg2(x, v1)
4683			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
4684			v2.AuxInt = int64ToAuxInt(d - c)
4685			v.AddArg2(v0, v2)
4686			return true
4687		}
4688		break
4689	}
4690	// match: (AndB (Leq32U (Const32 [c]) x) (Less32U x (Const32 [d])))
4691	// cond: uint32(d) >= uint32(c)
4692	// result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
4693	for {
4694		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4695			if v_0.Op != OpLeq32U {
4696				continue
4697			}
4698			x := v_0.Args[1]
4699			v_0_0 := v_0.Args[0]
4700			if v_0_0.Op != OpConst32 {
4701				continue
4702			}
4703			c := auxIntToInt32(v_0_0.AuxInt)
4704			if v_1.Op != OpLess32U {
4705				continue
4706			}
4707			_ = v_1.Args[1]
4708			if x != v_1.Args[0] {
4709				continue
4710			}
4711			v_1_1 := v_1.Args[1]
4712			if v_1_1.Op != OpConst32 {
4713				continue
4714			}
4715			d := auxIntToInt32(v_1_1.AuxInt)
4716			if !(uint32(d) >= uint32(c)) {
4717				continue
4718			}
4719			v.reset(OpLess32U)
4720			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
4721			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
4722			v1.AuxInt = int32ToAuxInt(c)
4723			v0.AddArg2(x, v1)
4724			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
4725			v2.AuxInt = int32ToAuxInt(d - c)
4726			v.AddArg2(v0, v2)
4727			return true
4728		}
4729		break
4730	}
4731	// match: (AndB (Leq32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
4732	// cond: uint32(d) >= uint32(c)
4733	// result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
4734	for {
4735		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4736			if v_0.Op != OpLeq32U {
4737				continue
4738			}
4739			x := v_0.Args[1]
4740			v_0_0 := v_0.Args[0]
4741			if v_0_0.Op != OpConst32 {
4742				continue
4743			}
4744			c := auxIntToInt32(v_0_0.AuxInt)
4745			if v_1.Op != OpLeq32U {
4746				continue
4747			}
4748			_ = v_1.Args[1]
4749			if x != v_1.Args[0] {
4750				continue
4751			}
4752			v_1_1 := v_1.Args[1]
4753			if v_1_1.Op != OpConst32 {
4754				continue
4755			}
4756			d := auxIntToInt32(v_1_1.AuxInt)
4757			if !(uint32(d) >= uint32(c)) {
4758				continue
4759			}
4760			v.reset(OpLeq32U)
4761			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
4762			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
4763			v1.AuxInt = int32ToAuxInt(c)
4764			v0.AddArg2(x, v1)
4765			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
4766			v2.AuxInt = int32ToAuxInt(d - c)
4767			v.AddArg2(v0, v2)
4768			return true
4769		}
4770		break
4771	}
4772	// match: (AndB (Leq16U (Const16 [c]) x) (Less16U x (Const16 [d])))
4773	// cond: uint16(d) >= uint16(c)
4774	// result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
4775	for {
4776		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4777			if v_0.Op != OpLeq16U {
4778				continue
4779			}
4780			x := v_0.Args[1]
4781			v_0_0 := v_0.Args[0]
4782			if v_0_0.Op != OpConst16 {
4783				continue
4784			}
4785			c := auxIntToInt16(v_0_0.AuxInt)
4786			if v_1.Op != OpLess16U {
4787				continue
4788			}
4789			_ = v_1.Args[1]
4790			if x != v_1.Args[0] {
4791				continue
4792			}
4793			v_1_1 := v_1.Args[1]
4794			if v_1_1.Op != OpConst16 {
4795				continue
4796			}
4797			d := auxIntToInt16(v_1_1.AuxInt)
4798			if !(uint16(d) >= uint16(c)) {
4799				continue
4800			}
4801			v.reset(OpLess16U)
4802			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
4803			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
4804			v1.AuxInt = int16ToAuxInt(c)
4805			v0.AddArg2(x, v1)
4806			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
4807			v2.AuxInt = int16ToAuxInt(d - c)
4808			v.AddArg2(v0, v2)
4809			return true
4810		}
4811		break
4812	}
4813	// match: (AndB (Leq16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
4814	// cond: uint16(d) >= uint16(c)
4815	// result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
4816	for {
4817		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4818			if v_0.Op != OpLeq16U {
4819				continue
4820			}
4821			x := v_0.Args[1]
4822			v_0_0 := v_0.Args[0]
4823			if v_0_0.Op != OpConst16 {
4824				continue
4825			}
4826			c := auxIntToInt16(v_0_0.AuxInt)
4827			if v_1.Op != OpLeq16U {
4828				continue
4829			}
4830			_ = v_1.Args[1]
4831			if x != v_1.Args[0] {
4832				continue
4833			}
4834			v_1_1 := v_1.Args[1]
4835			if v_1_1.Op != OpConst16 {
4836				continue
4837			}
4838			d := auxIntToInt16(v_1_1.AuxInt)
4839			if !(uint16(d) >= uint16(c)) {
4840				continue
4841			}
4842			v.reset(OpLeq16U)
4843			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
4844			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
4845			v1.AuxInt = int16ToAuxInt(c)
4846			v0.AddArg2(x, v1)
4847			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
4848			v2.AuxInt = int16ToAuxInt(d - c)
4849			v.AddArg2(v0, v2)
4850			return true
4851		}
4852		break
4853	}
4854	// match: (AndB (Leq8U (Const8 [c]) x) (Less8U x (Const8 [d])))
4855	// cond: uint8(d) >= uint8(c)
4856	// result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
4857	for {
4858		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4859			if v_0.Op != OpLeq8U {
4860				continue
4861			}
4862			x := v_0.Args[1]
4863			v_0_0 := v_0.Args[0]
4864			if v_0_0.Op != OpConst8 {
4865				continue
4866			}
4867			c := auxIntToInt8(v_0_0.AuxInt)
4868			if v_1.Op != OpLess8U {
4869				continue
4870			}
4871			_ = v_1.Args[1]
4872			if x != v_1.Args[0] {
4873				continue
4874			}
4875			v_1_1 := v_1.Args[1]
4876			if v_1_1.Op != OpConst8 {
4877				continue
4878			}
4879			d := auxIntToInt8(v_1_1.AuxInt)
4880			if !(uint8(d) >= uint8(c)) {
4881				continue
4882			}
4883			v.reset(OpLess8U)
4884			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
4885			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
4886			v1.AuxInt = int8ToAuxInt(c)
4887			v0.AddArg2(x, v1)
4888			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
4889			v2.AuxInt = int8ToAuxInt(d - c)
4890			v.AddArg2(v0, v2)
4891			return true
4892		}
4893		break
4894	}
4895	// match: (AndB (Leq8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
4896	// cond: uint8(d) >= uint8(c)
4897	// result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
4898	for {
4899		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4900			if v_0.Op != OpLeq8U {
4901				continue
4902			}
4903			x := v_0.Args[1]
4904			v_0_0 := v_0.Args[0]
4905			if v_0_0.Op != OpConst8 {
4906				continue
4907			}
4908			c := auxIntToInt8(v_0_0.AuxInt)
4909			if v_1.Op != OpLeq8U {
4910				continue
4911			}
4912			_ = v_1.Args[1]
4913			if x != v_1.Args[0] {
4914				continue
4915			}
4916			v_1_1 := v_1.Args[1]
4917			if v_1_1.Op != OpConst8 {
4918				continue
4919			}
4920			d := auxIntToInt8(v_1_1.AuxInt)
4921			if !(uint8(d) >= uint8(c)) {
4922				continue
4923			}
4924			v.reset(OpLeq8U)
4925			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
4926			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
4927			v1.AuxInt = int8ToAuxInt(c)
4928			v0.AddArg2(x, v1)
4929			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
4930			v2.AuxInt = int8ToAuxInt(d - c)
4931			v.AddArg2(v0, v2)
4932			return true
4933		}
4934		break
4935	}
4936	// match: (AndB (Less64U (Const64 [c]) x) (Less64U x (Const64 [d])))
4937	// cond: uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)
4938	// result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
4939	for {
4940		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4941			if v_0.Op != OpLess64U {
4942				continue
4943			}
4944			x := v_0.Args[1]
4945			v_0_0 := v_0.Args[0]
4946			if v_0_0.Op != OpConst64 {
4947				continue
4948			}
4949			c := auxIntToInt64(v_0_0.AuxInt)
4950			if v_1.Op != OpLess64U {
4951				continue
4952			}
4953			_ = v_1.Args[1]
4954			if x != v_1.Args[0] {
4955				continue
4956			}
4957			v_1_1 := v_1.Args[1]
4958			if v_1_1.Op != OpConst64 {
4959				continue
4960			}
4961			d := auxIntToInt64(v_1_1.AuxInt)
4962			if !(uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)) {
4963				continue
4964			}
4965			v.reset(OpLess64U)
4966			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
4967			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
4968			v1.AuxInt = int64ToAuxInt(c + 1)
4969			v0.AddArg2(x, v1)
4970			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
4971			v2.AuxInt = int64ToAuxInt(d - c - 1)
4972			v.AddArg2(v0, v2)
4973			return true
4974		}
4975		break
4976	}
4977	// match: (AndB (Less64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
4978	// cond: uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)
4979	// result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
4980	for {
4981		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4982			if v_0.Op != OpLess64U {
4983				continue
4984			}
4985			x := v_0.Args[1]
4986			v_0_0 := v_0.Args[0]
4987			if v_0_0.Op != OpConst64 {
4988				continue
4989			}
4990			c := auxIntToInt64(v_0_0.AuxInt)
4991			if v_1.Op != OpLeq64U {
4992				continue
4993			}
4994			_ = v_1.Args[1]
4995			if x != v_1.Args[0] {
4996				continue
4997			}
4998			v_1_1 := v_1.Args[1]
4999			if v_1_1.Op != OpConst64 {
5000				continue
5001			}
5002			d := auxIntToInt64(v_1_1.AuxInt)
5003			if !(uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)) {
5004				continue
5005			}
5006			v.reset(OpLeq64U)
5007			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
5008			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
5009			v1.AuxInt = int64ToAuxInt(c + 1)
5010			v0.AddArg2(x, v1)
5011			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
5012			v2.AuxInt = int64ToAuxInt(d - c - 1)
5013			v.AddArg2(v0, v2)
5014			return true
5015		}
5016		break
5017	}
5018	// match: (AndB (Less32U (Const32 [c]) x) (Less32U x (Const32 [d])))
5019	// cond: uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)
5020	// result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
5021	for {
5022		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5023			if v_0.Op != OpLess32U {
5024				continue
5025			}
5026			x := v_0.Args[1]
5027			v_0_0 := v_0.Args[0]
5028			if v_0_0.Op != OpConst32 {
5029				continue
5030			}
5031			c := auxIntToInt32(v_0_0.AuxInt)
5032			if v_1.Op != OpLess32U {
5033				continue
5034			}
5035			_ = v_1.Args[1]
5036			if x != v_1.Args[0] {
5037				continue
5038			}
5039			v_1_1 := v_1.Args[1]
5040			if v_1_1.Op != OpConst32 {
5041				continue
5042			}
5043			d := auxIntToInt32(v_1_1.AuxInt)
5044			if !(uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)) {
5045				continue
5046			}
5047			v.reset(OpLess32U)
5048			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
5049			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
5050			v1.AuxInt = int32ToAuxInt(c + 1)
5051			v0.AddArg2(x, v1)
5052			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
5053			v2.AuxInt = int32ToAuxInt(d - c - 1)
5054			v.AddArg2(v0, v2)
5055			return true
5056		}
5057		break
5058	}
5059	// match: (AndB (Less32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
5060	// cond: uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)
5061	// result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
5062	for {
5063		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5064			if v_0.Op != OpLess32U {
5065				continue
5066			}
5067			x := v_0.Args[1]
5068			v_0_0 := v_0.Args[0]
5069			if v_0_0.Op != OpConst32 {
5070				continue
5071			}
5072			c := auxIntToInt32(v_0_0.AuxInt)
5073			if v_1.Op != OpLeq32U {
5074				continue
5075			}
5076			_ = v_1.Args[1]
5077			if x != v_1.Args[0] {
5078				continue
5079			}
5080			v_1_1 := v_1.Args[1]
5081			if v_1_1.Op != OpConst32 {
5082				continue
5083			}
5084			d := auxIntToInt32(v_1_1.AuxInt)
5085			if !(uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)) {
5086				continue
5087			}
5088			v.reset(OpLeq32U)
5089			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
5090			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
5091			v1.AuxInt = int32ToAuxInt(c + 1)
5092			v0.AddArg2(x, v1)
5093			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
5094			v2.AuxInt = int32ToAuxInt(d - c - 1)
5095			v.AddArg2(v0, v2)
5096			return true
5097		}
5098		break
5099	}
5100	// match: (AndB (Less16U (Const16 [c]) x) (Less16U x (Const16 [d])))
5101	// cond: uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)
5102	// result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
5103	for {
5104		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5105			if v_0.Op != OpLess16U {
5106				continue
5107			}
5108			x := v_0.Args[1]
5109			v_0_0 := v_0.Args[0]
5110			if v_0_0.Op != OpConst16 {
5111				continue
5112			}
5113			c := auxIntToInt16(v_0_0.AuxInt)
5114			if v_1.Op != OpLess16U {
5115				continue
5116			}
5117			_ = v_1.Args[1]
5118			if x != v_1.Args[0] {
5119				continue
5120			}
5121			v_1_1 := v_1.Args[1]
5122			if v_1_1.Op != OpConst16 {
5123				continue
5124			}
5125			d := auxIntToInt16(v_1_1.AuxInt)
5126			if !(uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)) {
5127				continue
5128			}
5129			v.reset(OpLess16U)
5130			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
5131			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
5132			v1.AuxInt = int16ToAuxInt(c + 1)
5133			v0.AddArg2(x, v1)
5134			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
5135			v2.AuxInt = int16ToAuxInt(d - c - 1)
5136			v.AddArg2(v0, v2)
5137			return true
5138		}
5139		break
5140	}
5141	// match: (AndB (Less16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
5142	// cond: uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)
5143	// result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
5144	for {
5145		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5146			if v_0.Op != OpLess16U {
5147				continue
5148			}
5149			x := v_0.Args[1]
5150			v_0_0 := v_0.Args[0]
5151			if v_0_0.Op != OpConst16 {
5152				continue
5153			}
5154			c := auxIntToInt16(v_0_0.AuxInt)
5155			if v_1.Op != OpLeq16U {
5156				continue
5157			}
5158			_ = v_1.Args[1]
5159			if x != v_1.Args[0] {
5160				continue
5161			}
5162			v_1_1 := v_1.Args[1]
5163			if v_1_1.Op != OpConst16 {
5164				continue
5165			}
5166			d := auxIntToInt16(v_1_1.AuxInt)
5167			if !(uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)) {
5168				continue
5169			}
5170			v.reset(OpLeq16U)
5171			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
5172			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
5173			v1.AuxInt = int16ToAuxInt(c + 1)
5174			v0.AddArg2(x, v1)
5175			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
5176			v2.AuxInt = int16ToAuxInt(d - c - 1)
5177			v.AddArg2(v0, v2)
5178			return true
5179		}
5180		break
5181	}
5182	// match: (AndB (Less8U (Const8 [c]) x) (Less8U x (Const8 [d])))
5183	// cond: uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)
5184	// result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
5185	for {
5186		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5187			if v_0.Op != OpLess8U {
5188				continue
5189			}
5190			x := v_0.Args[1]
5191			v_0_0 := v_0.Args[0]
5192			if v_0_0.Op != OpConst8 {
5193				continue
5194			}
5195			c := auxIntToInt8(v_0_0.AuxInt)
5196			if v_1.Op != OpLess8U {
5197				continue
5198			}
5199			_ = v_1.Args[1]
5200			if x != v_1.Args[0] {
5201				continue
5202			}
5203			v_1_1 := v_1.Args[1]
5204			if v_1_1.Op != OpConst8 {
5205				continue
5206			}
5207			d := auxIntToInt8(v_1_1.AuxInt)
5208			if !(uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)) {
5209				continue
5210			}
5211			v.reset(OpLess8U)
5212			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
5213			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
5214			v1.AuxInt = int8ToAuxInt(c + 1)
5215			v0.AddArg2(x, v1)
5216			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
5217			v2.AuxInt = int8ToAuxInt(d - c - 1)
5218			v.AddArg2(v0, v2)
5219			return true
5220		}
5221		break
5222	}
5223	// match: (AndB (Less8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
5224	// cond: uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)
5225	// result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
5226	for {
5227		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5228			if v_0.Op != OpLess8U {
5229				continue
5230			}
5231			x := v_0.Args[1]
5232			v_0_0 := v_0.Args[0]
5233			if v_0_0.Op != OpConst8 {
5234				continue
5235			}
5236			c := auxIntToInt8(v_0_0.AuxInt)
5237			if v_1.Op != OpLeq8U {
5238				continue
5239			}
5240			_ = v_1.Args[1]
5241			if x != v_1.Args[0] {
5242				continue
5243			}
5244			v_1_1 := v_1.Args[1]
5245			if v_1_1.Op != OpConst8 {
5246				continue
5247			}
5248			d := auxIntToInt8(v_1_1.AuxInt)
5249			if !(uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)) {
5250				continue
5251			}
5252			v.reset(OpLeq8U)
5253			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
5254			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
5255			v1.AuxInt = int8ToAuxInt(c + 1)
5256			v0.AddArg2(x, v1)
5257			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
5258			v2.AuxInt = int8ToAuxInt(d - c - 1)
5259			v.AddArg2(v0, v2)
5260			return true
5261		}
5262		break
5263	}
5264	return false
5265}
// rewriteValuegeneric_OpArraySelect applies the generic rewrite rules for
// ArraySelect to v, reporting whether v was rewritten. Rules are tried in
// order; each `for { ... }` is a single-attempt match (break = no match).
func rewriteValuegeneric_OpArraySelect(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ArraySelect (ArrayMake1 x))
	// result: x
	// Selecting the sole element of a 1-element array is the element itself.
	for {
		if v_0.Op != OpArrayMake1 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (ArraySelect [0] (IData x))
	// result: (IData x)
	for {
		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpIData {
			break
		}
		x := v_0.Args[0]
		v.reset(OpIData)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpCeil applies the generic rewrite rules for Ceil to v,
// reporting whether v was rewritten (constant folding via math.Ceil).
func rewriteValuegeneric_OpCeil(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Ceil (Const64F [c]))
	// result: (Const64F [math.Ceil(c)])
	for {
		if v_0.Op != OpConst64F {
			break
		}
		c := auxIntToFloat64(v_0.AuxInt)
		v.reset(OpConst64F)
		v.AuxInt = float64ToAuxInt(math.Ceil(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCom16 applies the generic rewrite rules for Com16
// (16-bit bitwise complement) to v, reporting whether v was rewritten.
func rewriteValuegeneric_OpCom16(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Com16 (Com16 x))
	// result: x
	// Double complement cancels.
	for {
		if v_0.Op != OpCom16 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Com16 (Const16 [c]))
	// result: (Const16 [^c])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(^c)
		return true
	}
	// match: (Com16 (Add16 (Const16 [-1]) x))
	// result: (Neg16 x)
	// Uses ^(x-1) == -x; the inner loop tries both argument orders since
	// Add16 is commutative.
	for {
		if v_0.Op != OpAdd16 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst16 || auxIntToInt16(v_0_0.AuxInt) != -1 {
				continue
			}
			x := v_0_1
			v.reset(OpNeg16)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpCom32 applies the generic rewrite rules for Com32
// (32-bit bitwise complement) to v, reporting whether v was rewritten.
func rewriteValuegeneric_OpCom32(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Com32 (Com32 x))
	// result: x
	// Double complement cancels.
	for {
		if v_0.Op != OpCom32 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Com32 (Const32 [c]))
	// result: (Const32 [^c])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(^c)
		return true
	}
	// match: (Com32 (Add32 (Const32 [-1]) x))
	// result: (Neg32 x)
	// Uses ^(x-1) == -x; the inner loop tries both argument orders since
	// Add32 is commutative.
	for {
		if v_0.Op != OpAdd32 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != -1 {
				continue
			}
			x := v_0_1
			v.reset(OpNeg32)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpCom64 applies the generic rewrite rules for Com64
// (64-bit bitwise complement) to v, reporting whether v was rewritten.
func rewriteValuegeneric_OpCom64(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Com64 (Com64 x))
	// result: x
	// Double complement cancels.
	for {
		if v_0.Op != OpCom64 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Com64 (Const64 [c]))
	// result: (Const64 [^c])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(^c)
		return true
	}
	// match: (Com64 (Add64 (Const64 [-1]) x))
	// result: (Neg64 x)
	// Uses ^(x-1) == -x; the inner loop tries both argument orders since
	// Add64 is commutative.
	for {
		if v_0.Op != OpAdd64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != -1 {
				continue
			}
			x := v_0_1
			v.reset(OpNeg64)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpCom8 applies the generic rewrite rules for Com8
// (8-bit bitwise complement) to v, reporting whether v was rewritten.
func rewriteValuegeneric_OpCom8(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Com8 (Com8 x))
	// result: x
	// Double complement cancels.
	for {
		if v_0.Op != OpCom8 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Com8 (Const8 [c]))
	// result: (Const8 [^c])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(^c)
		return true
	}
	// match: (Com8 (Add8 (Const8 [-1]) x))
	// result: (Neg8 x)
	// Uses ^(x-1) == -x; the inner loop tries both argument orders since
	// Add8 is commutative.
	for {
		if v_0.Op != OpAdd8 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst8 || auxIntToInt8(v_0_0.AuxInt) != -1 {
				continue
			}
			x := v_0_1
			v.reset(OpNeg8)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpConstInterface lowers ConstInterface (the zero-value
// interface) into its explicit two-word form; it always rewrites and returns
// true (the match is unconditional).
func rewriteValuegeneric_OpConstInterface(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ConstInterface)
	// result: (IMake (ConstNil <typ.Uintptr>) (ConstNil <typ.BytePtr>))
	for {
		v.reset(OpIMake)
		v0 := b.NewValue0(v.Pos, OpConstNil, typ.Uintptr)
		v1 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValuegeneric_OpConstSlice lowers ConstSlice (the zero-value slice)
// into an explicit SliceMake of (nil ptr, 0 len, 0 cap), choosing the integer
// width of len/cap from the target's pointer size. Reports whether v was
// rewritten.
func rewriteValuegeneric_OpConstSlice(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (ConstSlice)
	// cond: config.PtrSize == 4
	// result: (SliceMake (ConstNil <v.Type.Elem().PtrTo()>) (Const32 <typ.Int> [0]) (Const32 <typ.Int> [0]))
	for {
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpSliceMake)
		v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.Elem().PtrTo())
		v1 := b.NewValue0(v.Pos, OpConst32, typ.Int)
		v1.AuxInt = int32ToAuxInt(0)
		// v1 is deliberately used for both len and cap (both zero).
		v.AddArg3(v0, v1, v1)
		return true
	}
	// match: (ConstSlice)
	// cond: config.PtrSize == 8
	// result: (SliceMake (ConstNil <v.Type.Elem().PtrTo()>) (Const64 <typ.Int> [0]) (Const64 <typ.Int> [0]))
	for {
		if !(config.PtrSize == 8) {
			break
		}
		v.reset(OpSliceMake)
		v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.Elem().PtrTo())
		v1 := b.NewValue0(v.Pos, OpConst64, typ.Int)
		v1.AuxInt = int64ToAuxInt(0)
		// v1 is deliberately used for both len and cap (both zero).
		v.AddArg3(v0, v1, v1)
		return true
	}
	return false
}
// rewriteValuegeneric_OpConstString lowers a ConstString into an explicit
// StringMake (data pointer, length). The empty string uses a nil pointer;
// a non-empty string addresses its backing data via fe.StringData and an SB
// base. Length width follows the target's pointer size. Reports whether v
// was rewritten.
func rewriteValuegeneric_OpConstString(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	fe := b.Func.fe
	typ := &b.Func.Config.Types
	// match: (ConstString {str})
	// cond: config.PtrSize == 4 && str == ""
	// result: (StringMake (ConstNil) (Const32 <typ.Int> [0]))
	for {
		str := auxToString(v.Aux)
		if !(config.PtrSize == 4 && str == "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
		v1 := b.NewValue0(v.Pos, OpConst32, typ.Int)
		v1.AuxInt = int32ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (ConstString {str})
	// cond: config.PtrSize == 8 && str == ""
	// result: (StringMake (ConstNil) (Const64 <typ.Int> [0]))
	for {
		str := auxToString(v.Aux)
		if !(config.PtrSize == 8 && str == "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
		v1 := b.NewValue0(v.Pos, OpConst64, typ.Int)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (ConstString {str})
	// cond: config.PtrSize == 4 && str != ""
	// result: (StringMake (Addr <typ.BytePtr> {fe.StringData(str)} (SB)) (Const32 <typ.Int> [int32(len(str))]))
	for {
		str := auxToString(v.Aux)
		if !(config.PtrSize == 4 && str != "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Pos, OpAddr, typ.BytePtr)
		// fe.StringData interns the string's bytes as a data symbol
		// (frontend-provided; see the ssa frontend interface).
		v0.Aux = symToAux(fe.StringData(str))
		v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.Int)
		v2.AuxInt = int32ToAuxInt(int32(len(str)))
		v.AddArg2(v0, v2)
		return true
	}
	// match: (ConstString {str})
	// cond: config.PtrSize == 8 && str != ""
	// result: (StringMake (Addr <typ.BytePtr> {fe.StringData(str)} (SB)) (Const64 <typ.Int> [int64(len(str))]))
	for {
		str := auxToString(v.Aux)
		if !(config.PtrSize == 8 && str != "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Pos, OpAddr, typ.BytePtr)
		v0.Aux = symToAux(fe.StringData(str))
		v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.Int)
		v2.AuxInt = int64ToAuxInt(int64(len(str)))
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValuegeneric_OpConvert applies the generic rewrite rules for Convert
// (unsafe pointer/integer conversion carrying a memory argument) to v,
// reporting whether v was rewritten. The rules only fire when both Converts
// observe the same memory state (mem != v_1 guards against reordering across
// memory operations).
func rewriteValuegeneric_OpConvert(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Convert (Add64 (Convert ptr mem) off) mem)
	// result: (AddPtr ptr off)
	for {
		if v_0.Op != OpAdd64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// Add64 is commutative: try (Convert, off) and (off, Convert).
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConvert {
				continue
			}
			mem := v_0_0.Args[1]
			ptr := v_0_0.Args[0]
			off := v_0_1
			if mem != v_1 {
				continue
			}
			v.reset(OpAddPtr)
			v.AddArg2(ptr, off)
			return true
		}
		break
	}
	// match: (Convert (Add32 (Convert ptr mem) off) mem)
	// result: (AddPtr ptr off)
	for {
		if v_0.Op != OpAdd32 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConvert {
				continue
			}
			mem := v_0_0.Args[1]
			ptr := v_0_0.Args[0]
			off := v_0_1
			if mem != v_1 {
				continue
			}
			v.reset(OpAddPtr)
			v.AddArg2(ptr, off)
			return true
		}
		break
	}
	// match: (Convert (Convert ptr mem) mem)
	// result: ptr
	// Round-tripping through Convert at the same memory state is a no-op.
	for {
		if v_0.Op != OpConvert {
			break
		}
		mem := v_0.Args[1]
		ptr := v_0.Args[0]
		if mem != v_1 {
			break
		}
		v.copyOf(ptr)
		return true
	}
	return false
}
// rewriteValuegeneric_OpCtz16 constant-folds Ctz16 (count trailing zeros of a
// 16-bit value) via the ntz16 helper. The result width matches the target's
// native int (Const32 on 4-byte-pointer targets, Const64 on 8-byte). Reports
// whether v was rewritten.
func rewriteValuegeneric_OpCtz16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Ctz16 (Const16 [c]))
	// cond: config.PtrSize == 4
	// result: (Const32 [int32(ntz16(c))])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(ntz16(c)))
		return true
	}
	// match: (Ctz16 (Const16 [c]))
	// cond: config.PtrSize == 8
	// result: (Const64 [int64(ntz16(c))])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if !(config.PtrSize == 8) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(ntz16(c)))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCtz32 constant-folds Ctz32 (count trailing zeros of a
// 32-bit value) via the ntz32 helper, sizing the result constant by the
// target's pointer size. Reports whether v was rewritten.
func rewriteValuegeneric_OpCtz32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Ctz32 (Const32 [c]))
	// cond: config.PtrSize == 4
	// result: (Const32 [int32(ntz32(c))])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(ntz32(c)))
		return true
	}
	// match: (Ctz32 (Const32 [c]))
	// cond: config.PtrSize == 8
	// result: (Const64 [int64(ntz32(c))])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		if !(config.PtrSize == 8) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(ntz32(c)))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCtz64 constant-folds Ctz64 (count trailing zeros of a
// 64-bit value) via the ntz64 helper, sizing the result constant by the
// target's pointer size. Reports whether v was rewritten.
func rewriteValuegeneric_OpCtz64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Ctz64 (Const64 [c]))
	// cond: config.PtrSize == 4
	// result: (Const32 [int32(ntz64(c))])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(ntz64(c)))
		return true
	}
	// match: (Ctz64 (Const64 [c]))
	// cond: config.PtrSize == 8
	// result: (Const64 [int64(ntz64(c))])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if !(config.PtrSize == 8) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(ntz64(c)))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCtz8 constant-folds Ctz8 (count trailing zeros of an
// 8-bit value) via the ntz8 helper, sizing the result constant by the
// target's pointer size. Reports whether v was rewritten.
func rewriteValuegeneric_OpCtz8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Ctz8 (Const8 [c]))
	// cond: config.PtrSize == 4
	// result: (Const32 [int32(ntz8(c))])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(ntz8(c)))
		return true
	}
	// match: (Ctz8 (Const8 [c]))
	// cond: config.PtrSize == 8
	// result: (Const64 [int64(ntz8(c))])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		if !(config.PtrSize == 8) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(ntz8(c)))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt32Fto32 constant-folds a float32-to-int32
// conversion of a constant operand. Reports whether v was rewritten.
func rewriteValuegeneric_OpCvt32Fto32(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Cvt32Fto32 (Const32F [c]))
	// result: (Const32 [int32(c)])
	for {
		if v_0.Op != OpConst32F {
			break
		}
		c := auxIntToFloat32(v_0.AuxInt)
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt32Fto64 constant-folds a float32-to-int64
// conversion of a constant operand. Reports whether v was rewritten.
func rewriteValuegeneric_OpCvt32Fto64(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Cvt32Fto64 (Const32F [c]))
	// result: (Const64 [int64(c)])
	for {
		if v_0.Op != OpConst32F {
			break
		}
		c := auxIntToFloat32(v_0.AuxInt)
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt32Fto64F constant-folds a float32-to-float64
// widening of a constant operand (exact, since float64 represents every
// float32 value). Reports whether v was rewritten.
func rewriteValuegeneric_OpCvt32Fto64F(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Cvt32Fto64F (Const32F [c]))
	// result: (Const64F [float64(c)])
	for {
		if v_0.Op != OpConst32F {
			break
		}
		c := auxIntToFloat32(v_0.AuxInt)
		v.reset(OpConst64F)
		v.AuxInt = float64ToAuxInt(float64(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt32to32F constant-folds an int32-to-float32
// conversion of a constant operand. Reports whether v was rewritten.
func rewriteValuegeneric_OpCvt32to32F(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Cvt32to32F (Const32 [c]))
	// result: (Const32F [float32(c)])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		v.reset(OpConst32F)
		v.AuxInt = float32ToAuxInt(float32(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt32to64F constant-folds an int32-to-float64
// conversion of a constant operand (exact for all int32 values). Reports
// whether v was rewritten.
func rewriteValuegeneric_OpCvt32to64F(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Cvt32to64F (Const32 [c]))
	// result: (Const64F [float64(c)])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		v.reset(OpConst64F)
		v.AuxInt = float64ToAuxInt(float64(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt64Fto32 constant-folds a float64-to-int32
// conversion of a constant operand. Reports whether v was rewritten.
func rewriteValuegeneric_OpCvt64Fto32(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Cvt64Fto32 (Const64F [c]))
	// result: (Const32 [int32(c)])
	for {
		if v_0.Op != OpConst64F {
			break
		}
		c := auxIntToFloat64(v_0.AuxInt)
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(c))
		return true
	}
	return false
}
5909func rewriteValuegeneric_OpCvt64Fto32F(v *Value) bool {
5910	v_0 := v.Args[0]
5911	// match: (Cvt64Fto32F (Const64F [c]))
5912	// result: (Const32F [float32(c)])
5913	for {
5914		if v_0.Op != OpConst64F {
5915			break
5916		}
5917		c := auxIntToFloat64(v_0.AuxInt)
5918		v.reset(OpConst32F)
5919		v.AuxInt = float32ToAuxInt(float32(c))
5920		return true
5921	}
5922	// match: (Cvt64Fto32F sqrt0:(Sqrt (Cvt32Fto64F x)))
5923	// cond: sqrt0.Uses==1
5924	// result: (Sqrt32 x)
5925	for {
5926		sqrt0 := v_0
5927		if sqrt0.Op != OpSqrt {
5928			break
5929		}
5930		sqrt0_0 := sqrt0.Args[0]
5931		if sqrt0_0.Op != OpCvt32Fto64F {
5932			break
5933		}
5934		x := sqrt0_0.Args[0]
5935		if !(sqrt0.Uses == 1) {
5936			break
5937		}
5938		v.reset(OpSqrt32)
5939		v.AddArg(x)
5940		return true
5941	}
5942	return false
5943}
5944func rewriteValuegeneric_OpCvt64Fto64(v *Value) bool {
5945	v_0 := v.Args[0]
5946	// match: (Cvt64Fto64 (Const64F [c]))
5947	// result: (Const64 [int64(c)])
5948	for {
5949		if v_0.Op != OpConst64F {
5950			break
5951		}
5952		c := auxIntToFloat64(v_0.AuxInt)
5953		v.reset(OpConst64)
5954		v.AuxInt = int64ToAuxInt(int64(c))
5955		return true
5956	}
5957	return false
5958}
5959func rewriteValuegeneric_OpCvt64to32F(v *Value) bool {
5960	v_0 := v.Args[0]
5961	// match: (Cvt64to32F (Const64 [c]))
5962	// result: (Const32F [float32(c)])
5963	for {
5964		if v_0.Op != OpConst64 {
5965			break
5966		}
5967		c := auxIntToInt64(v_0.AuxInt)
5968		v.reset(OpConst32F)
5969		v.AuxInt = float32ToAuxInt(float32(c))
5970		return true
5971	}
5972	return false
5973}
5974func rewriteValuegeneric_OpCvt64to64F(v *Value) bool {
5975	v_0 := v.Args[0]
5976	// match: (Cvt64to64F (Const64 [c]))
5977	// result: (Const64F [float64(c)])
5978	for {
5979		if v_0.Op != OpConst64 {
5980			break
5981		}
5982		c := auxIntToInt64(v_0.AuxInt)
5983		v.reset(OpConst64F)
5984		v.AuxInt = float64ToAuxInt(float64(c))
5985		return true
5986	}
5987	return false
5988}
5989func rewriteValuegeneric_OpCvtBoolToUint8(v *Value) bool {
5990	v_0 := v.Args[0]
5991	// match: (CvtBoolToUint8 (ConstBool [false]))
5992	// result: (Const8 [0])
5993	for {
5994		if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != false {
5995			break
5996		}
5997		v.reset(OpConst8)
5998		v.AuxInt = int8ToAuxInt(0)
5999		return true
6000	}
6001	// match: (CvtBoolToUint8 (ConstBool [true]))
6002	// result: (Const8 [1])
6003	for {
6004		if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != true {
6005			break
6006		}
6007		v.reset(OpConst8)
6008		v.AuxInt = int8ToAuxInt(1)
6009		return true
6010	}
6011	return false
6012}
// rewriteValuegeneric_OpDiv16 strength-reduces signed 16-bit division.
// Rules are tried strictly in order: constant folding; unsigned shift when
// the dividend is known non-negative and the divisor a power of two; negation
// wrapping for negative divisors; the -1<<15 special case; signed shift with
// rounding fixup for power-of-two divisors; and finally magic-constant
// multiplication via smagic16 (widening through 32-bit ops).
// Each "for { ... break }" loop is one rewrite attempt; a break falls
// through to the next rule. Reports whether v was rewritten.
func rewriteValuegeneric_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 (Const16 [c]) (Const16 [d]))
	// cond: d != 0
	// result: (Const16 [c/d])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if v_1.Op != OpConst16 {
			break
		}
		d := auxIntToInt16(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpConst16)
		// Go's / truncates toward zero, matching the semantics of Div16.
		v.AuxInt = int16ToAuxInt(c / d)
		return true
	}
	// match: (Div16 n (Const16 [c]))
	// cond: isNonNegative(n) && isPowerOfTwo16(c)
	// result: (Rsh16Ux64 n (Const64 <typ.UInt64> [log16(c)]))
	for {
		n := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		if !(isNonNegative(n) && isPowerOfTwo16(c)) {
			break
		}
		v.reset(OpRsh16Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(log16(c))
		v.AddArg2(n, v0)
		return true
	}
	// match: (Div16 <t> n (Const16 [c]))
	// cond: c < 0 && c != -1<<15
	// result: (Neg16 (Div16 <t> n (Const16 <t> [-c])))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		if !(c < 0 && c != -1<<15) {
			break
		}
		v.reset(OpNeg16)
		v0 := b.NewValue0(v.Pos, OpDiv16, t)
		v1 := b.NewValue0(v.Pos, OpConst16, t)
		v1.AuxInt = int16ToAuxInt(-c)
		v0.AddArg2(n, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Div16 <t> x (Const16 [-1<<15]))
	// result: (Rsh16Ux64 (And16 <t> x (Neg16 <t> x)) (Const64 <typ.UInt64> [15]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != -1<<15 {
			break
		}
		v.reset(OpRsh16Ux64)
		v0 := b.NewValue0(v.Pos, OpAnd16, t)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v1.AddArg(x)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(15)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Div16 <t> n (Const16 [c]))
	// cond: isPowerOfTwo16(c)
	// result: (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [int64(16-log16(c))]))) (Const64 <typ.UInt64> [int64(log16(c))]))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		if !(isPowerOfTwo16(c)) {
			break
		}
		// Shift with rounding adjustment so the result truncates toward
		// zero for negative dividends, as Div16 requires.
		v.reset(OpRsh16x64)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v1 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
		v2 := b.NewValue0(v.Pos, OpRsh16x64, t)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(15)
		v2.AddArg2(n, v3)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(int64(16 - log16(c)))
		v1.AddArg2(v2, v4)
		v0.AddArg2(n, v1)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(int64(log16(c)))
		v.AddArg2(v0, v5)
		return true
	}
	// match: (Div16 <t> x (Const16 [c]))
	// cond: smagicOK16(c)
	// result: (Sub16 <t> (Rsh32x64 <t> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(smagic16(c).m)]) (SignExt16to32 x)) (Const64 <typ.UInt64> [16+smagic16(c).s])) (Rsh32x64 <t> (SignExt16to32 x) (Const64 <typ.UInt64> [31])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		if !(smagicOK16(c)) {
			break
		}
		v.reset(OpSub16)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v2.AuxInt = int32ToAuxInt(int32(smagic16(c).m))
		v3 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v3.AddArg(x)
		v1.AddArg2(v2, v3)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(16 + smagic16(c).s)
		v0.AddArg2(v1, v4)
		v5 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(31)
		// v3 (SignExt16to32 x) is deliberately shared between both shifts.
		v5.AddArg2(v3, v6)
		v.AddArg2(v0, v5)
		return true
	}
	return false
}
// rewriteValuegeneric_OpDiv16u strength-reduces unsigned 16-bit division.
// Rules are tried strictly in order: constant folding; a plain shift for
// power-of-two divisors; then magic-constant multiplication via umagic16,
// with the widening strategy chosen by config.RegSize (8-byte registers use
// one 64-bit multiply; 4-byte registers use 32-bit variants, the last of
// which needs config.useAvg for the Avg32u op).
// Each "for { ... break }" loop is one rewrite attempt; a break falls
// through to the next rule. Reports whether v was rewritten.
func rewriteValuegeneric_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Div16u (Const16 [c]) (Const16 [d]))
	// cond: d != 0
	// result: (Const16 [int16(uint16(c)/uint16(d))])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if v_1.Op != OpConst16 {
			break
		}
		d := auxIntToInt16(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpConst16)
		// AuxInt is stored signed, so divide in uint16 and convert back.
		v.AuxInt = int16ToAuxInt(int16(uint16(c) / uint16(d)))
		return true
	}
	// match: (Div16u n (Const16 [c]))
	// cond: isPowerOfTwo16(c)
	// result: (Rsh16Ux64 n (Const64 <typ.UInt64> [log16(c)]))
	for {
		n := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		if !(isPowerOfTwo16(c)) {
			break
		}
		v.reset(OpRsh16Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(log16(c))
		v.AddArg2(n, v0)
		return true
	}
	// match: (Div16u x (Const16 [c]))
	// cond: umagicOK16(c) && config.RegSize == 8
	// result: (Trunc64to16 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<16+umagic16(c).m)]) (ZeroExt16to64 x)) (Const64 <typ.UInt64> [16+umagic16(c).s])))
	for {
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		if !(umagicOK16(c) && config.RegSize == 8) {
			break
		}
		v.reset(OpTrunc64to16)
		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(int64(1<<16 + umagic16(c).m))
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(x)
		v1.AddArg2(v2, v3)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(16 + umagic16(c).s)
		v0.AddArg2(v1, v4)
		v.AddArg(v0)
		return true
	}
	// match: (Div16u x (Const16 [c]))
	// cond: umagicOK16(c) && config.RegSize == 4 && umagic16(c).m&1 == 0
	// result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<15+umagic16(c).m/2)]) (ZeroExt16to32 x)) (Const64 <typ.UInt64> [16+umagic16(c).s-1])))
	for {
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		if !(umagicOK16(c) && config.RegSize == 4 && umagic16(c).m&1 == 0) {
			break
		}
		v.reset(OpTrunc32to16)
		v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v2.AuxInt = int32ToAuxInt(int32(1<<15 + umagic16(c).m/2))
		v3 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v3.AddArg(x)
		v1.AddArg2(v2, v3)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(16 + umagic16(c).s - 1)
		v0.AddArg2(v1, v4)
		v.AddArg(v0)
		return true
	}
	// match: (Div16u x (Const16 [c]))
	// cond: umagicOK16(c) && config.RegSize == 4 && c&1 == 0
	// result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<15+(umagic16(c).m+1)/2)]) (Rsh32Ux64 <typ.UInt32> (ZeroExt16to32 x) (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [16+umagic16(c).s-2])))
	for {
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		if !(umagicOK16(c) && config.RegSize == 4 && c&1 == 0) {
			break
		}
		v.reset(OpTrunc32to16)
		v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v2.AuxInt = int32ToAuxInt(int32(1<<15 + (umagic16(c).m+1)/2))
		v3 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(1)
		v3.AddArg2(v4, v5)
		v1.AddArg2(v2, v3)
		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(16 + umagic16(c).s - 2)
		v0.AddArg2(v1, v6)
		v.AddArg(v0)
		return true
	}
	// match: (Div16u x (Const16 [c]))
	// cond: umagicOK16(c) && config.RegSize == 4 && config.useAvg
	// result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Avg32u (Lsh32x64 <typ.UInt32> (ZeroExt16to32 x) (Const64 <typ.UInt64> [16])) (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(umagic16(c).m)]) (ZeroExt16to32 x))) (Const64 <typ.UInt64> [16+umagic16(c).s-1])))
	for {
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		if !(umagicOK16(c) && config.RegSize == 4 && config.useAvg) {
			break
		}
		v.reset(OpTrunc32to16)
		v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpAvg32u, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpLsh32x64, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(16)
		v2.AddArg2(v3, v4)
		v5 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
		v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v6.AuxInt = int32ToAuxInt(int32(umagic16(c).m))
		// v3 (ZeroExt16to32 x) is deliberately shared by the shift and multiply.
		v5.AddArg2(v6, v3)
		v1.AddArg2(v2, v5)
		v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v7.AuxInt = int64ToAuxInt(16 + umagic16(c).s - 1)
		v0.AddArg2(v1, v7)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpDiv32 strength-reduces signed 32-bit division.
// Rules are tried strictly in order: constant folding; unsigned shift when
// the dividend is known non-negative and the divisor a power of two; negation
// wrapping for negative divisors; the -1<<31 special case; signed shift with
// rounding fixup; then magic-constant multiplication via smagic32, either
// through one 64-bit multiply (RegSize == 8) or Hmul32 variants (RegSize == 4,
// gated on config.useHmul, split by the parity of the magic constant).
// Each "for { ... break }" loop is one rewrite attempt; a break falls
// through to the next rule. Reports whether v was rewritten.
func rewriteValuegeneric_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Div32 (Const32 [c]) (Const32 [d]))
	// cond: d != 0
	// result: (Const32 [c/d])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		if v_1.Op != OpConst32 {
			break
		}
		d := auxIntToInt32(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpConst32)
		// Go's / truncates toward zero, matching the semantics of Div32.
		v.AuxInt = int32ToAuxInt(c / d)
		return true
	}
	// match: (Div32 n (Const32 [c]))
	// cond: isNonNegative(n) && isPowerOfTwo32(c)
	// result: (Rsh32Ux64 n (Const64 <typ.UInt64> [log32(c)]))
	for {
		n := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(isNonNegative(n) && isPowerOfTwo32(c)) {
			break
		}
		v.reset(OpRsh32Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(log32(c))
		v.AddArg2(n, v0)
		return true
	}
	// match: (Div32 <t> n (Const32 [c]))
	// cond: c < 0 && c != -1<<31
	// result: (Neg32 (Div32 <t> n (Const32 <t> [-c])))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(c < 0 && c != -1<<31) {
			break
		}
		v.reset(OpNeg32)
		v0 := b.NewValue0(v.Pos, OpDiv32, t)
		v1 := b.NewValue0(v.Pos, OpConst32, t)
		v1.AuxInt = int32ToAuxInt(-c)
		v0.AddArg2(n, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Div32 <t> x (Const32 [-1<<31]))
	// result: (Rsh32Ux64 (And32 <t> x (Neg32 <t> x)) (Const64 <typ.UInt64> [31]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != -1<<31 {
			break
		}
		v.reset(OpRsh32Ux64)
		v0 := b.NewValue0(v.Pos, OpAnd32, t)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v1.AddArg(x)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(31)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Div32 <t> n (Const32 [c]))
	// cond: isPowerOfTwo32(c)
	// result: (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [int64(32-log32(c))]))) (Const64 <typ.UInt64> [int64(log32(c))]))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(isPowerOfTwo32(c)) {
			break
		}
		// Shift with rounding adjustment so the result truncates toward
		// zero for negative dividends, as Div32 requires.
		v.reset(OpRsh32x64)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v1 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
		v2 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(31)
		v2.AddArg2(n, v3)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(int64(32 - log32(c)))
		v1.AddArg2(v2, v4)
		v0.AddArg2(n, v1)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(int64(log32(c)))
		v.AddArg2(v0, v5)
		return true
	}
	// match: (Div32 <t> x (Const32 [c]))
	// cond: smagicOK32(c) && config.RegSize == 8
	// result: (Sub32 <t> (Rsh64x64 <t> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(smagic32(c).m)]) (SignExt32to64 x)) (Const64 <typ.UInt64> [32+smagic32(c).s])) (Rsh64x64 <t> (SignExt32to64 x) (Const64 <typ.UInt64> [63])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(smagicOK32(c) && config.RegSize == 8) {
			break
		}
		v.reset(OpSub32)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(int64(smagic32(c).m))
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(x)
		v1.AddArg2(v2, v3)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(32 + smagic32(c).s)
		v0.AddArg2(v1, v4)
		v5 := b.NewValue0(v.Pos, OpRsh64x64, t)
		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(63)
		// v3 (SignExt32to64 x) is deliberately shared between both shifts.
		v5.AddArg2(v3, v6)
		v.AddArg2(v0, v5)
		return true
	}
	// match: (Div32 <t> x (Const32 [c]))
	// cond: smagicOK32(c) && config.RegSize == 4 && smagic32(c).m&1 == 0 && config.useHmul
	// result: (Sub32 <t> (Rsh32x64 <t> (Hmul32 <t> (Const32 <typ.UInt32> [int32(smagic32(c).m/2)]) x) (Const64 <typ.UInt64> [smagic32(c).s-1])) (Rsh32x64 <t> x (Const64 <typ.UInt64> [31])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(smagicOK32(c) && config.RegSize == 4 && smagic32(c).m&1 == 0 && config.useHmul) {
			break
		}
		v.reset(OpSub32)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v1 := b.NewValue0(v.Pos, OpHmul32, t)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v2.AuxInt = int32ToAuxInt(int32(smagic32(c).m / 2))
		v1.AddArg2(v2, x)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(smagic32(c).s - 1)
		v0.AddArg2(v1, v3)
		v4 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(31)
		v4.AddArg2(x, v5)
		v.AddArg2(v0, v4)
		return true
	}
	// match: (Div32 <t> x (Const32 [c]))
	// cond: smagicOK32(c) && config.RegSize == 4 && smagic32(c).m&1 != 0 && config.useHmul
	// result: (Sub32 <t> (Rsh32x64 <t> (Add32 <t> (Hmul32 <t> (Const32 <typ.UInt32> [int32(smagic32(c).m)]) x) x) (Const64 <typ.UInt64> [smagic32(c).s])) (Rsh32x64 <t> x (Const64 <typ.UInt64> [31])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(smagicOK32(c) && config.RegSize == 4 && smagic32(c).m&1 != 0 && config.useHmul) {
			break
		}
		v.reset(OpSub32)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v1 := b.NewValue0(v.Pos, OpAdd32, t)
		v2 := b.NewValue0(v.Pos, OpHmul32, t)
		v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v3.AuxInt = int32ToAuxInt(int32(smagic32(c).m))
		v2.AddArg2(v3, x)
		v1.AddArg2(v2, x)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(smagic32(c).s)
		v0.AddArg2(v1, v4)
		v5 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(31)
		v5.AddArg2(x, v6)
		v.AddArg2(v0, v5)
		return true
	}
	return false
}
6523func rewriteValuegeneric_OpDiv32F(v *Value) bool {
6524	v_1 := v.Args[1]
6525	v_0 := v.Args[0]
6526	b := v.Block
6527	// match: (Div32F (Const32F [c]) (Const32F [d]))
6528	// cond: c/d == c/d
6529	// result: (Const32F [c/d])
6530	for {
6531		if v_0.Op != OpConst32F {
6532			break
6533		}
6534		c := auxIntToFloat32(v_0.AuxInt)
6535		if v_1.Op != OpConst32F {
6536			break
6537		}
6538		d := auxIntToFloat32(v_1.AuxInt)
6539		if !(c/d == c/d) {
6540			break
6541		}
6542		v.reset(OpConst32F)
6543		v.AuxInt = float32ToAuxInt(c / d)
6544		return true
6545	}
6546	// match: (Div32F x (Const32F <t> [c]))
6547	// cond: reciprocalExact32(c)
6548	// result: (Mul32F x (Const32F <t> [1/c]))
6549	for {
6550		x := v_0
6551		if v_1.Op != OpConst32F {
6552			break
6553		}
6554		t := v_1.Type
6555		c := auxIntToFloat32(v_1.AuxInt)
6556		if !(reciprocalExact32(c)) {
6557			break
6558		}
6559		v.reset(OpMul32F)
6560		v0 := b.NewValue0(v.Pos, OpConst32F, t)
6561		v0.AuxInt = float32ToAuxInt(1 / c)
6562		v.AddArg2(x, v0)
6563		return true
6564	}
6565	return false
6566}
// rewriteValuegeneric_OpDiv32u strength-reduces unsigned 32-bit division.
// Rules are tried strictly in order: constant folding; a plain shift for
// power-of-two divisors; then magic-constant multiplication via umagic32.
// On 4-byte registers the Hmul32u variants are used (gated on config.useHmul,
// the last also on config.useAvg); on 8-byte registers a single 64-bit
// multiply is used instead.
// Each "for { ... break }" loop is one rewrite attempt; a break falls
// through to the next rule. Reports whether v was rewritten.
func rewriteValuegeneric_OpDiv32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Div32u (Const32 [c]) (Const32 [d]))
	// cond: d != 0
	// result: (Const32 [int32(uint32(c)/uint32(d))])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		if v_1.Op != OpConst32 {
			break
		}
		d := auxIntToInt32(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpConst32)
		// AuxInt is stored signed, so divide in uint32 and convert back.
		v.AuxInt = int32ToAuxInt(int32(uint32(c) / uint32(d)))
		return true
	}
	// match: (Div32u n (Const32 [c]))
	// cond: isPowerOfTwo32(c)
	// result: (Rsh32Ux64 n (Const64 <typ.UInt64> [log32(c)]))
	for {
		n := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(isPowerOfTwo32(c)) {
			break
		}
		v.reset(OpRsh32Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(log32(c))
		v.AddArg2(n, v0)
		return true
	}
	// match: (Div32u x (Const32 [c]))
	// cond: umagicOK32(c) && config.RegSize == 4 && umagic32(c).m&1 == 0 && config.useHmul
	// result: (Rsh32Ux64 <typ.UInt32> (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<31+umagic32(c).m/2)]) x) (Const64 <typ.UInt64> [umagic32(c).s-1]))
	for {
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(umagicOK32(c) && config.RegSize == 4 && umagic32(c).m&1 == 0 && config.useHmul) {
			break
		}
		v.reset(OpRsh32Ux64)
		v.Type = typ.UInt32
		v0 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v1.AuxInt = int32ToAuxInt(int32(1<<31 + umagic32(c).m/2))
		v0.AddArg2(v1, x)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(umagic32(c).s - 1)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Div32u x (Const32 [c]))
	// cond: umagicOK32(c) && config.RegSize == 4 && c&1 == 0 && config.useHmul
	// result: (Rsh32Ux64 <typ.UInt32> (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<31+(umagic32(c).m+1)/2)]) (Rsh32Ux64 <typ.UInt32> x (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [umagic32(c).s-2]))
	for {
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(umagicOK32(c) && config.RegSize == 4 && c&1 == 0 && config.useHmul) {
			break
		}
		v.reset(OpRsh32Ux64)
		v.Type = typ.UInt32
		v0 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v1.AuxInt = int32ToAuxInt(int32(1<<31 + (umagic32(c).m+1)/2))
		v2 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(1)
		v2.AddArg2(x, v3)
		v0.AddArg2(v1, v2)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(umagic32(c).s - 2)
		v.AddArg2(v0, v4)
		return true
	}
	// match: (Div32u x (Const32 [c]))
	// cond: umagicOK32(c) && config.RegSize == 4 && config.useAvg && config.useHmul
	// result: (Rsh32Ux64 <typ.UInt32> (Avg32u x (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int32(umagic32(c).m)]) x)) (Const64 <typ.UInt64> [umagic32(c).s-1]))
	for {
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(umagicOK32(c) && config.RegSize == 4 && config.useAvg && config.useHmul) {
			break
		}
		v.reset(OpRsh32Ux64)
		v.Type = typ.UInt32
		v0 := b.NewValue0(v.Pos, OpAvg32u, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v2.AuxInt = int32ToAuxInt(int32(umagic32(c).m))
		v1.AddArg2(v2, x)
		v0.AddArg2(x, v1)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(umagic32(c).s - 1)
		v.AddArg2(v0, v3)
		return true
	}
	// match: (Div32u x (Const32 [c]))
	// cond: umagicOK32(c) && config.RegSize == 8 && umagic32(c).m&1 == 0
	// result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<31+umagic32(c).m/2)]) (ZeroExt32to64 x)) (Const64 <typ.UInt64> [32+umagic32(c).s-1])))
	for {
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(umagicOK32(c) && config.RegSize == 8 && umagic32(c).m&1 == 0) {
			break
		}
		v.reset(OpTrunc64to32)
		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(int64(1<<31 + umagic32(c).m/2))
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(x)
		v1.AddArg2(v2, v3)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(32 + umagic32(c).s - 1)
		v0.AddArg2(v1, v4)
		v.AddArg(v0)
		return true
	}
	// match: (Div32u x (Const32 [c]))
	// cond: umagicOK32(c) && config.RegSize == 8 && c&1 == 0
	// result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<31+(umagic32(c).m+1)/2)]) (Rsh64Ux64 <typ.UInt64> (ZeroExt32to64 x) (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [32+umagic32(c).s-2])))
	for {
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(umagicOK32(c) && config.RegSize == 8 && c&1 == 0) {
			break
		}
		v.reset(OpTrunc64to32)
		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(int64(1<<31 + (umagic32(c).m+1)/2))
		v3 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(1)
		v3.AddArg2(v4, v5)
		v1.AddArg2(v2, v3)
		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(32 + umagic32(c).s - 2)
		v0.AddArg2(v1, v6)
		v.AddArg(v0)
		return true
	}
	// match: (Div32u x (Const32 [c]))
	// cond: umagicOK32(c) && config.RegSize == 8 && config.useAvg
	// result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Avg64u (Lsh64x64 <typ.UInt64> (ZeroExt32to64 x) (Const64 <typ.UInt64> [32])) (Mul64 <typ.UInt64> (Const64 <typ.UInt32> [int64(umagic32(c).m)]) (ZeroExt32to64 x))) (Const64 <typ.UInt64> [32+umagic32(c).s-1])))
	for {
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(umagicOK32(c) && config.RegSize == 8 && config.useAvg) {
			break
		}
		v.reset(OpTrunc64to32)
		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpAvg64u, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpLsh64x64, typ.UInt64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(32)
		v2.AddArg2(v3, v4)
		v5 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
		// NOTE(review): this OpConst64 carries typ.UInt32, mirroring the
		// rules file's "(Const64 <typ.UInt32> ...)". Looks inconsistent with
		// the sibling rules (which use typ.UInt64) — confirm against
		// _gen/generic.rules before changing; do not hand-edit generated code.
		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt32)
		v6.AuxInt = int64ToAuxInt(int64(umagic32(c).m))
		v5.AddArg2(v6, v3)
		v1.AddArg2(v2, v5)
		v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v7.AuxInt = int64ToAuxInt(32 + umagic32(c).s - 1)
		v0.AddArg2(v1, v7)
		v.AddArg(v0)
		return true
	}
	return false
}
6775func rewriteValuegeneric_OpDiv64(v *Value) bool {
6776	v_1 := v.Args[1]
6777	v_0 := v.Args[0]
6778	b := v.Block
6779	config := b.Func.Config
6780	typ := &b.Func.Config.Types
6781	// match: (Div64 (Const64 [c]) (Const64 [d]))
6782	// cond: d != 0
6783	// result: (Const64 [c/d])
6784	for {
6785		if v_0.Op != OpConst64 {
6786			break
6787		}
6788		c := auxIntToInt64(v_0.AuxInt)
6789		if v_1.Op != OpConst64 {
6790			break
6791		}
6792		d := auxIntToInt64(v_1.AuxInt)
6793		if !(d != 0) {
6794			break
6795		}
6796		v.reset(OpConst64)
6797		v.AuxInt = int64ToAuxInt(c / d)
6798		return true
6799	}
6800	// match: (Div64 n (Const64 [c]))
6801	// cond: isNonNegative(n) && isPowerOfTwo64(c)
6802	// result: (Rsh64Ux64 n (Const64 <typ.UInt64> [log64(c)]))
6803	for {
6804		n := v_0
6805		if v_1.Op != OpConst64 {
6806			break
6807		}
6808		c := auxIntToInt64(v_1.AuxInt)
6809		if !(isNonNegative(n) && isPowerOfTwo64(c)) {
6810			break
6811		}
6812		v.reset(OpRsh64Ux64)
6813		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6814		v0.AuxInt = int64ToAuxInt(log64(c))
6815		v.AddArg2(n, v0)
6816		return true
6817	}
6818	// match: (Div64 n (Const64 [-1<<63]))
6819	// cond: isNonNegative(n)
6820	// result: (Const64 [0])
6821	for {
6822		n := v_0
6823		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 || !(isNonNegative(n)) {
6824			break
6825		}
6826		v.reset(OpConst64)
6827		v.AuxInt = int64ToAuxInt(0)
6828		return true
6829	}
6830	// match: (Div64 <t> n (Const64 [c]))
6831	// cond: c < 0 && c != -1<<63
6832	// result: (Neg64 (Div64 <t> n (Const64 <t> [-c])))
6833	for {
6834		t := v.Type
6835		n := v_0
6836		if v_1.Op != OpConst64 {
6837			break
6838		}
6839		c := auxIntToInt64(v_1.AuxInt)
6840		if !(c < 0 && c != -1<<63) {
6841			break
6842		}
6843		v.reset(OpNeg64)
6844		v0 := b.NewValue0(v.Pos, OpDiv64, t)
6845		v1 := b.NewValue0(v.Pos, OpConst64, t)
6846		v1.AuxInt = int64ToAuxInt(-c)
6847		v0.AddArg2(n, v1)
6848		v.AddArg(v0)
6849		return true
6850	}
6851	// match: (Div64 <t> x (Const64 [-1<<63]))
6852	// result: (Rsh64Ux64 (And64 <t> x (Neg64 <t> x)) (Const64 <typ.UInt64> [63]))
6853	for {
6854		t := v.Type
6855		x := v_0
6856		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 {
6857			break
6858		}
6859		v.reset(OpRsh64Ux64)
6860		v0 := b.NewValue0(v.Pos, OpAnd64, t)
6861		v1 := b.NewValue0(v.Pos, OpNeg64, t)
6862		v1.AddArg(x)
6863		v0.AddArg2(x, v1)
6864		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6865		v2.AuxInt = int64ToAuxInt(63)
6866		v.AddArg2(v0, v2)
6867		return true
6868	}
6869	// match: (Div64 <t> n (Const64 [c]))
6870	// cond: isPowerOfTwo64(c)
6871	// result: (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [int64(64-log64(c))]))) (Const64 <typ.UInt64> [int64(log64(c))]))
6872	for {
6873		t := v.Type
6874		n := v_0
6875		if v_1.Op != OpConst64 {
6876			break
6877		}
6878		c := auxIntToInt64(v_1.AuxInt)
6879		if !(isPowerOfTwo64(c)) {
6880			break
6881		}
6882		v.reset(OpRsh64x64)
6883		v0 := b.NewValue0(v.Pos, OpAdd64, t)
6884		v1 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
6885		v2 := b.NewValue0(v.Pos, OpRsh64x64, t)
6886		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6887		v3.AuxInt = int64ToAuxInt(63)
6888		v2.AddArg2(n, v3)
6889		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6890		v4.AuxInt = int64ToAuxInt(int64(64 - log64(c)))
6891		v1.AddArg2(v2, v4)
6892		v0.AddArg2(n, v1)
6893		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6894		v5.AuxInt = int64ToAuxInt(int64(log64(c)))
6895		v.AddArg2(v0, v5)
6896		return true
6897	}
6898	// match: (Div64 <t> x (Const64 [c]))
6899	// cond: smagicOK64(c) && smagic64(c).m&1 == 0 && config.useHmul
6900	// result: (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <typ.UInt64> [int64(smagic64(c).m/2)]) x) (Const64 <typ.UInt64> [smagic64(c).s-1])) (Rsh64x64 <t> x (Const64 <typ.UInt64> [63])))
6901	for {
6902		t := v.Type
6903		x := v_0
6904		if v_1.Op != OpConst64 {
6905			break
6906		}
6907		c := auxIntToInt64(v_1.AuxInt)
6908		if !(smagicOK64(c) && smagic64(c).m&1 == 0 && config.useHmul) {
6909			break
6910		}
6911		v.reset(OpSub64)
6912		v.Type = t
6913		v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
6914		v1 := b.NewValue0(v.Pos, OpHmul64, t)
6915		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6916		v2.AuxInt = int64ToAuxInt(int64(smagic64(c).m / 2))
6917		v1.AddArg2(v2, x)
6918		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6919		v3.AuxInt = int64ToAuxInt(smagic64(c).s - 1)
6920		v0.AddArg2(v1, v3)
6921		v4 := b.NewValue0(v.Pos, OpRsh64x64, t)
6922		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6923		v5.AuxInt = int64ToAuxInt(63)
6924		v4.AddArg2(x, v5)
6925		v.AddArg2(v0, v4)
6926		return true
6927	}
6928	// match: (Div64 <t> x (Const64 [c]))
6929	// cond: smagicOK64(c) && smagic64(c).m&1 != 0 && config.useHmul
6930	// result: (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <typ.UInt64> [int64(smagic64(c).m)]) x) x) (Const64 <typ.UInt64> [smagic64(c).s])) (Rsh64x64 <t> x (Const64 <typ.UInt64> [63])))
6931	for {
6932		t := v.Type
6933		x := v_0
6934		if v_1.Op != OpConst64 {
6935			break
6936		}
6937		c := auxIntToInt64(v_1.AuxInt)
6938		if !(smagicOK64(c) && smagic64(c).m&1 != 0 && config.useHmul) {
6939			break
6940		}
6941		v.reset(OpSub64)
6942		v.Type = t
6943		v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
6944		v1 := b.NewValue0(v.Pos, OpAdd64, t)
6945		v2 := b.NewValue0(v.Pos, OpHmul64, t)
6946		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6947		v3.AuxInt = int64ToAuxInt(int64(smagic64(c).m))
6948		v2.AddArg2(v3, x)
6949		v1.AddArg2(v2, x)
6950		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6951		v4.AuxInt = int64ToAuxInt(smagic64(c).s)
6952		v0.AddArg2(v1, v4)
6953		v5 := b.NewValue0(v.Pos, OpRsh64x64, t)
6954		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6955		v6.AuxInt = int64ToAuxInt(63)
6956		v5.AddArg2(x, v6)
6957		v.AddArg2(v0, v5)
6958		return true
6959	}
6960	return false
6961}
6962func rewriteValuegeneric_OpDiv64F(v *Value) bool {
6963	v_1 := v.Args[1]
6964	v_0 := v.Args[0]
6965	b := v.Block
6966	// match: (Div64F (Const64F [c]) (Const64F [d]))
6967	// cond: c/d == c/d
6968	// result: (Const64F [c/d])
6969	for {
6970		if v_0.Op != OpConst64F {
6971			break
6972		}
6973		c := auxIntToFloat64(v_0.AuxInt)
6974		if v_1.Op != OpConst64F {
6975			break
6976		}
6977		d := auxIntToFloat64(v_1.AuxInt)
6978		if !(c/d == c/d) {
6979			break
6980		}
6981		v.reset(OpConst64F)
6982		v.AuxInt = float64ToAuxInt(c / d)
6983		return true
6984	}
6985	// match: (Div64F x (Const64F <t> [c]))
6986	// cond: reciprocalExact64(c)
6987	// result: (Mul64F x (Const64F <t> [1/c]))
6988	for {
6989		x := v_0
6990		if v_1.Op != OpConst64F {
6991			break
6992		}
6993		t := v_1.Type
6994		c := auxIntToFloat64(v_1.AuxInt)
6995		if !(reciprocalExact64(c)) {
6996			break
6997		}
6998		v.reset(OpMul64F)
6999		v0 := b.NewValue0(v.Pos, OpConst64F, t)
7000		v0.AuxInt = float64ToAuxInt(1 / c)
7001		v.AddArg2(x, v0)
7002		return true
7003	}
7004	return false
7005}
// rewriteValuegeneric_OpDiv64u applies the generic rewrite rules for the
// unsigned 64-bit division op Div64u, trying each rule in order and
// returning true as soon as one rewrites v.
func rewriteValuegeneric_OpDiv64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Div64u (Const64 [c]) (Const64 [d]))
	// cond: d != 0
	// result: (Const64 [int64(uint64(c)/uint64(d))])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
		return true
	}
	// match: (Div64u n (Const64 [c]))
	// cond: isPowerOfTwo64(c)
	// result: (Rsh64Ux64 n (Const64 <typ.UInt64> [log64(c)]))
	for {
		n := v_0
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg2(n, v0)
		return true
	}
	// match: (Div64u n (Const64 [-1<<63]))
	// result: (Rsh64Ux64 n (Const64 <typ.UInt64> [63]))
	// The divisor -1<<63 is 1<<63 when read unsigned, i.e. the one
	// power of two the previous rule's isPowerOfTwo64 form did not fire
	// on; a 63-bit unsigned shift handles it directly.
	for {
		n := v_0
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 {
			break
		}
		v.reset(OpRsh64Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(63)
		v.AddArg2(n, v0)
		return true
	}
	// match: (Div64u x (Const64 [c]))
	// cond: c > 0 && c <= 0xFFFF && umagicOK32(int32(c)) && config.RegSize == 4 && config.useHmul
	// result: (Add64 (Add64 <typ.UInt64> (Add64 <typ.UInt64> (Lsh64x64 <typ.UInt64> (ZeroExt32to64 (Div32u <typ.UInt32> (Trunc64to32 <typ.UInt32> (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [32]))) (Const32 <typ.UInt32> [int32(c)]))) (Const64 <typ.UInt64> [32])) (ZeroExt32to64 (Div32u <typ.UInt32> (Trunc64to32 <typ.UInt32> x) (Const32 <typ.UInt32> [int32(c)])))) (Mul64 <typ.UInt64> (ZeroExt32to64 <typ.UInt64> (Mod32u <typ.UInt32> (Trunc64to32 <typ.UInt32> (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [32]))) (Const32 <typ.UInt32> [int32(c)]))) (Const64 <typ.UInt64> [int64((1<<32)/c)]))) (ZeroExt32to64 (Div32u <typ.UInt32> (Add32 <typ.UInt32> (Mod32u <typ.UInt32> (Trunc64to32 <typ.UInt32> x) (Const32 <typ.UInt32> [int32(c)])) (Mul32 <typ.UInt32> (Mod32u <typ.UInt32> (Trunc64to32 <typ.UInt32> (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [32]))) (Const32 <typ.UInt32> [int32(c)])) (Const32 <typ.UInt32> [int32((1<<32)%c)]))) (Const32 <typ.UInt32> [int32(c)]))))
	// On 32-bit targets (RegSize == 4) this synthesizes the 64-bit
	// division out of 32-bit Div32u/Mod32u on the two halves of x.
	// Shared subexpressions of the result pattern are built once and
	// reused: v7 (the constant 32, used as both shift amounts), v5 (the
	// truncated high half), v8 (the divisor constant), v11 (the
	// truncated low half), and v14 (the high-half remainder).
	for {
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(c > 0 && c <= 0xFFFF && umagicOK32(int32(c)) && config.RegSize == 4 && config.useHmul) {
			break
		}
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpLsh64x64, typ.UInt64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4 := b.NewValue0(v.Pos, OpDiv32u, typ.UInt32)
		v5 := b.NewValue0(v.Pos, OpTrunc64to32, typ.UInt32)
		v6 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
		v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v7.AuxInt = int64ToAuxInt(32)
		v6.AddArg2(x, v7)
		v5.AddArg(v6)
		v8 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v8.AuxInt = int32ToAuxInt(int32(c))
		v4.AddArg2(v5, v8)
		v3.AddArg(v4)
		v2.AddArg2(v3, v7)
		v9 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v10 := b.NewValue0(v.Pos, OpDiv32u, typ.UInt32)
		v11 := b.NewValue0(v.Pos, OpTrunc64to32, typ.UInt32)
		v11.AddArg(x)
		v10.AddArg2(v11, v8)
		v9.AddArg(v10)
		v1.AddArg2(v2, v9)
		v12 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
		v13 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v14 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
		v14.AddArg2(v5, v8)
		v13.AddArg(v14)
		v15 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v15.AuxInt = int64ToAuxInt(int64((1 << 32) / c))
		v12.AddArg2(v13, v15)
		v0.AddArg2(v1, v12)
		v16 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v17 := b.NewValue0(v.Pos, OpDiv32u, typ.UInt32)
		v18 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
		v19 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
		v19.AddArg2(v11, v8)
		v20 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
		v21 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v21.AuxInt = int32ToAuxInt(int32((1 << 32) % c))
		v20.AddArg2(v14, v21)
		v18.AddArg2(v19, v20)
		v17.AddArg2(v18, v8)
		v16.AddArg(v17)
		v.AddArg2(v0, v16)
		return true
	}
	// The three rules below strength-reduce division by a constant into
	// a 64x64->high multiply (Hmul64u). They apply, in order: when the
	// magic multiplier m is even, when the divisor c is even, and in the
	// general case (which additionally requires Avg64u support).
	// match: (Div64u x (Const64 [c]))
	// cond: umagicOK64(c) && config.RegSize == 8 && umagic64(c).m&1 == 0 && config.useHmul
	// result: (Rsh64Ux64 <typ.UInt64> (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<63+umagic64(c).m/2)]) x) (Const64 <typ.UInt64> [umagic64(c).s-1]))
	for {
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(umagicOK64(c) && config.RegSize == 8 && umagic64(c).m&1 == 0 && config.useHmul) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(int64(1<<63 + umagic64(c).m/2))
		v0.AddArg2(v1, x)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(umagic64(c).s - 1)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Div64u x (Const64 [c]))
	// cond: umagicOK64(c) && config.RegSize == 8 && c&1 == 0 && config.useHmul
	// result: (Rsh64Ux64 <typ.UInt64> (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<63+(umagic64(c).m+1)/2)]) (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [umagic64(c).s-2]))
	for {
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(umagicOK64(c) && config.RegSize == 8 && c&1 == 0 && config.useHmul) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(int64(1<<63 + (umagic64(c).m+1)/2))
		v2 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(1)
		v2.AddArg2(x, v3)
		v0.AddArg2(v1, v2)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(umagic64(c).s - 2)
		v.AddArg2(v0, v4)
		return true
	}
	// match: (Div64u x (Const64 [c]))
	// cond: umagicOK64(c) && config.RegSize == 8 && config.useAvg && config.useHmul
	// result: (Rsh64Ux64 <typ.UInt64> (Avg64u x (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(umagic64(c).m)]) x)) (Const64 <typ.UInt64> [umagic64(c).s-1]))
	for {
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(umagicOK64(c) && config.RegSize == 8 && config.useAvg && config.useHmul) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpAvg64u, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(int64(umagic64(c).m))
		v1.AddArg2(v2, x)
		v0.AddArg2(x, v1)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(umagic64(c).s - 1)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValuegeneric_OpDiv8 applies the generic rewrite rules for the
// signed 8-bit division op Div8, trying each rule in order and returning
// true as soon as one rewrites v.
func rewriteValuegeneric_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 (Const8 [c]) (Const8 [d]))
	// cond: d != 0
	// result: (Const8 [c/d])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		if v_1.Op != OpConst8 {
			break
		}
		d := auxIntToInt8(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(c / d)
		return true
	}
	// match: (Div8 n (Const8 [c]))
	// cond: isNonNegative(n) && isPowerOfTwo8(c)
	// result: (Rsh8Ux64 n (Const64 <typ.UInt64> [log8(c)]))
	// When the dividend is provably non-negative, signed division by a
	// power of two is a plain unsigned shift.
	for {
		n := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		if !(isNonNegative(n) && isPowerOfTwo8(c)) {
			break
		}
		v.reset(OpRsh8Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(log8(c))
		v.AddArg2(n, v0)
		return true
	}
	// match: (Div8 <t> n (Const8 [c]))
	// cond: c < 0 && c != -1<<7
	// result: (Neg8 (Div8 <t> n (Const8 <t> [-c])))
	// Division by a negative constant (other than the non-negatable
	// -1<<7) is re-expressed as a negated division by -c.
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		if !(c < 0 && c != -1<<7) {
			break
		}
		v.reset(OpNeg8)
		v0 := b.NewValue0(v.Pos, OpDiv8, t)
		v1 := b.NewValue0(v.Pos, OpConst8, t)
		v1.AuxInt = int8ToAuxInt(-c)
		v0.AddArg2(n, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Div8 <t> x (Const8 [-1<<7 ]))
	// result: (Rsh8Ux64 (And8 <t> x (Neg8 <t> x)) (Const64 <typ.UInt64> [7 ]))
	// The -1<<7 divisor gets its own rule: the quotient is 1 exactly
	// when x == -1<<7, which (And8 x (Neg8 x)) shifted right by 7
	// computes without a divide.
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != -1<<7 {
			break
		}
		v.reset(OpRsh8Ux64)
		v0 := b.NewValue0(v.Pos, OpAnd8, t)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v1.AddArg(x)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(7)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Div8 <t> n (Const8 [c]))
	// cond: isPowerOfTwo8(c)
	// result: (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [int64( 8-log8(c))]))) (Const64 <typ.UInt64> [int64(log8(c))]))
	// General signed power-of-two division: add a rounding correction
	// derived from the sign bit before the arithmetic shift so the
	// result truncates toward zero.
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		if !(isPowerOfTwo8(c)) {
			break
		}
		v.reset(OpRsh8x64)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v1 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
		v2 := b.NewValue0(v.Pos, OpRsh8x64, t)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(7)
		v2.AddArg2(n, v3)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(int64(8 - log8(c)))
		v1.AddArg2(v2, v4)
		v0.AddArg2(n, v1)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(int64(log8(c)))
		v.AddArg2(v0, v5)
		return true
	}
	// match: (Div8 <t> x (Const8 [c]))
	// cond: smagicOK8(c)
	// result: (Sub8 <t> (Rsh32x64 <t> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(smagic8(c).m)]) (SignExt8to32 x)) (Const64 <typ.UInt64> [8+smagic8(c).s])) (Rsh32x64 <t> (SignExt8to32 x) (Const64 <typ.UInt64> [31])))
	// Signed magic-number division: the 8-bit operation is widened and
	// done as a full 32-bit multiply (SignExt8to32/Mul32); the sign
	// extension v3 is shared between the multiply and the final
	// sign-correction shift.
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		if !(smagicOK8(c)) {
			break
		}
		v.reset(OpSub8)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v2.AuxInt = int32ToAuxInt(int32(smagic8(c).m))
		v3 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v3.AddArg(x)
		v1.AddArg2(v2, v3)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(8 + smagic8(c).s)
		v0.AddArg2(v1, v4)
		v5 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(31)
		v5.AddArg2(v3, v6)
		v.AddArg2(v0, v5)
		return true
	}
	return false
}
7343func rewriteValuegeneric_OpDiv8u(v *Value) bool {
7344	v_1 := v.Args[1]
7345	v_0 := v.Args[0]
7346	b := v.Block
7347	typ := &b.Func.Config.Types
7348	// match: (Div8u (Const8 [c]) (Const8 [d]))
7349	// cond: d != 0
7350	// result: (Const8 [int8(uint8(c)/uint8(d))])
7351	for {
7352		if v_0.Op != OpConst8 {
7353			break
7354		}
7355		c := auxIntToInt8(v_0.AuxInt)
7356		if v_1.Op != OpConst8 {
7357			break
7358		}
7359		d := auxIntToInt8(v_1.AuxInt)
7360		if !(d != 0) {
7361			break
7362		}
7363		v.reset(OpConst8)
7364		v.AuxInt = int8ToAuxInt(int8(uint8(c) / uint8(d)))
7365		return true
7366	}
7367	// match: (Div8u n (Const8 [c]))
7368	// cond: isPowerOfTwo8(c)
7369	// result: (Rsh8Ux64 n (Const64 <typ.UInt64> [log8(c)]))
7370	for {
7371		n := v_0
7372		if v_1.Op != OpConst8 {
7373			break
7374		}
7375		c := auxIntToInt8(v_1.AuxInt)
7376		if !(isPowerOfTwo8(c)) {
7377			break
7378		}
7379		v.reset(OpRsh8Ux64)
7380		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7381		v0.AuxInt = int64ToAuxInt(log8(c))
7382		v.AddArg2(n, v0)
7383		return true
7384	}
7385	// match: (Div8u x (Const8 [c]))
7386	// cond: umagicOK8(c)
7387	// result: (Trunc32to8 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<8+umagic8(c).m)]) (ZeroExt8to32 x)) (Const64 <typ.UInt64> [8+umagic8(c).s])))
7388	for {
7389		x := v_0
7390		if v_1.Op != OpConst8 {
7391			break
7392		}
7393		c := auxIntToInt8(v_1.AuxInt)
7394		if !(umagicOK8(c)) {
7395			break
7396		}
7397		v.reset(OpTrunc32to8)
7398		v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
7399		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
7400		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
7401		v2.AuxInt = int32ToAuxInt(int32(1<<8 + umagic8(c).m))
7402		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
7403		v3.AddArg(x)
7404		v1.AddArg2(v2, v3)
7405		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7406		v4.AuxInt = int64ToAuxInt(8 + umagic8(c).s)
7407		v0.AddArg2(v1, v4)
7408		v.AddArg(v0)
7409		return true
7410	}
7411	return false
7412}
7413func rewriteValuegeneric_OpEq16(v *Value) bool {
7414	v_1 := v.Args[1]
7415	v_0 := v.Args[0]
7416	b := v.Block
7417	config := b.Func.Config
7418	typ := &b.Func.Config.Types
7419	// match: (Eq16 x x)
7420	// result: (ConstBool [true])
7421	for {
7422		x := v_0
7423		if x != v_1 {
7424			break
7425		}
7426		v.reset(OpConstBool)
7427		v.AuxInt = boolToAuxInt(true)
7428		return true
7429	}
7430	// match: (Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
7431	// result: (Eq16 (Const16 <t> [c-d]) x)
7432	for {
7433		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7434			if v_0.Op != OpConst16 {
7435				continue
7436			}
7437			t := v_0.Type
7438			c := auxIntToInt16(v_0.AuxInt)
7439			if v_1.Op != OpAdd16 {
7440				continue
7441			}
7442			_ = v_1.Args[1]
7443			v_1_0 := v_1.Args[0]
7444			v_1_1 := v_1.Args[1]
7445			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
7446				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
7447					continue
7448				}
7449				d := auxIntToInt16(v_1_0.AuxInt)
7450				x := v_1_1
7451				v.reset(OpEq16)
7452				v0 := b.NewValue0(v.Pos, OpConst16, t)
7453				v0.AuxInt = int16ToAuxInt(c - d)
7454				v.AddArg2(v0, x)
7455				return true
7456			}
7457		}
7458		break
7459	}
7460	// match: (Eq16 (Const16 [c]) (Const16 [d]))
7461	// result: (ConstBool [c == d])
7462	for {
7463		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7464			if v_0.Op != OpConst16 {
7465				continue
7466			}
7467			c := auxIntToInt16(v_0.AuxInt)
7468			if v_1.Op != OpConst16 {
7469				continue
7470			}
7471			d := auxIntToInt16(v_1.AuxInt)
7472			v.reset(OpConstBool)
7473			v.AuxInt = boolToAuxInt(c == d)
7474			return true
7475		}
7476		break
7477	}
7478	// match: (Eq16 (Mod16u x (Const16 [c])) (Const16 [0]))
7479	// cond: x.Op != OpConst16 && udivisibleOK16(c) && !hasSmallRotate(config)
7480	// result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt16to32 <typ.UInt32> x) (Const32 <typ.UInt32> [int32(uint16(c))])) (Const32 <typ.UInt32> [0]))
7481	for {
7482		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7483			if v_0.Op != OpMod16u {
7484				continue
7485			}
7486			_ = v_0.Args[1]
7487			x := v_0.Args[0]
7488			v_0_1 := v_0.Args[1]
7489			if v_0_1.Op != OpConst16 {
7490				continue
7491			}
7492			c := auxIntToInt16(v_0_1.AuxInt)
7493			if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 || !(x.Op != OpConst16 && udivisibleOK16(c) && !hasSmallRotate(config)) {
7494				continue
7495			}
7496			v.reset(OpEq32)
7497			v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
7498			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
7499			v1.AddArg(x)
7500			v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
7501			v2.AuxInt = int32ToAuxInt(int32(uint16(c)))
7502			v0.AddArg2(v1, v2)
7503			v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
7504			v3.AuxInt = int32ToAuxInt(0)
7505			v.AddArg2(v0, v3)
7506			return true
7507		}
7508		break
7509	}
7510	// match: (Eq16 (Mod16 x (Const16 [c])) (Const16 [0]))
7511	// cond: x.Op != OpConst16 && sdivisibleOK16(c) && !hasSmallRotate(config)
7512	// result: (Eq32 (Mod32 <typ.Int32> (SignExt16to32 <typ.Int32> x) (Const32 <typ.Int32> [int32(c)])) (Const32 <typ.Int32> [0]))
7513	for {
7514		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7515			if v_0.Op != OpMod16 {
7516				continue
7517			}
7518			_ = v_0.Args[1]
7519			x := v_0.Args[0]
7520			v_0_1 := v_0.Args[1]
7521			if v_0_1.Op != OpConst16 {
7522				continue
7523			}
7524			c := auxIntToInt16(v_0_1.AuxInt)
7525			if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 || !(x.Op != OpConst16 && sdivisibleOK16(c) && !hasSmallRotate(config)) {
7526				continue
7527			}
7528			v.reset(OpEq32)
7529			v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
7530			v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
7531			v1.AddArg(x)
7532			v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
7533			v2.AuxInt = int32ToAuxInt(int32(c))
7534			v0.AddArg2(v1, v2)
7535			v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
7536			v3.AuxInt = int32ToAuxInt(0)
7537			v.AddArg2(v0, v3)
7538			return true
7539		}
7540		break
7541	}
7542	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt16to64 x)) (Const64 [s]))) ) )
7543	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic16(c).m) && s == 16+umagic16(c).s && x.Op != OpConst16 && udivisibleOK16(c)
7544	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(udivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(16-udivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(udivisible16(c).max)]) )
7545	for {
7546		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7547			x := v_0
7548			if v_1.Op != OpMul16 {
7549				continue
7550			}
7551			_ = v_1.Args[1]
7552			v_1_0 := v_1.Args[0]
7553			v_1_1 := v_1.Args[1]
7554			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
7555				if v_1_0.Op != OpConst16 {
7556					continue
7557				}
7558				c := auxIntToInt16(v_1_0.AuxInt)
7559				if v_1_1.Op != OpTrunc64to16 {
7560					continue
7561				}
7562				v_1_1_0 := v_1_1.Args[0]
7563				if v_1_1_0.Op != OpRsh64Ux64 {
7564					continue
7565				}
7566				_ = v_1_1_0.Args[1]
7567				mul := v_1_1_0.Args[0]
7568				if mul.Op != OpMul64 {
7569					continue
7570				}
7571				_ = mul.Args[1]
7572				mul_0 := mul.Args[0]
7573				mul_1 := mul.Args[1]
7574				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
7575					if mul_0.Op != OpConst64 {
7576						continue
7577					}
7578					m := auxIntToInt64(mul_0.AuxInt)
7579					if mul_1.Op != OpZeroExt16to64 || x != mul_1.Args[0] {
7580						continue
7581					}
7582					v_1_1_0_1 := v_1_1_0.Args[1]
7583					if v_1_1_0_1.Op != OpConst64 {
7584						continue
7585					}
7586					s := auxIntToInt64(v_1_1_0_1.AuxInt)
7587					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic16(c).m) && s == 16+umagic16(c).s && x.Op != OpConst16 && udivisibleOK16(c)) {
7588						continue
7589					}
7590					v.reset(OpLeq16U)
7591					v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
7592					v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
7593					v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7594					v2.AuxInt = int16ToAuxInt(int16(udivisible16(c).m))
7595					v1.AddArg2(v2, x)
7596					v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7597					v3.AuxInt = int16ToAuxInt(int16(16 - udivisible16(c).k))
7598					v0.AddArg2(v1, v3)
7599					v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7600					v4.AuxInt = int16ToAuxInt(int16(udivisible16(c).max))
7601					v.AddArg2(v0, v4)
7602					return true
7603				}
7604			}
7605		}
7606		break
7607	}
7608	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x)) (Const64 [s]))) ) )
7609	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<15+umagic16(c).m/2) && s == 16+umagic16(c).s-1 && x.Op != OpConst16 && udivisibleOK16(c)
7610	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(udivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(16-udivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(udivisible16(c).max)]) )
7611	for {
7612		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7613			x := v_0
7614			if v_1.Op != OpMul16 {
7615				continue
7616			}
7617			_ = v_1.Args[1]
7618			v_1_0 := v_1.Args[0]
7619			v_1_1 := v_1.Args[1]
7620			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
7621				if v_1_0.Op != OpConst16 {
7622					continue
7623				}
7624				c := auxIntToInt16(v_1_0.AuxInt)
7625				if v_1_1.Op != OpTrunc32to16 {
7626					continue
7627				}
7628				v_1_1_0 := v_1_1.Args[0]
7629				if v_1_1_0.Op != OpRsh32Ux64 {
7630					continue
7631				}
7632				_ = v_1_1_0.Args[1]
7633				mul := v_1_1_0.Args[0]
7634				if mul.Op != OpMul32 {
7635					continue
7636				}
7637				_ = mul.Args[1]
7638				mul_0 := mul.Args[0]
7639				mul_1 := mul.Args[1]
7640				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
7641					if mul_0.Op != OpConst32 {
7642						continue
7643					}
7644					m := auxIntToInt32(mul_0.AuxInt)
7645					if mul_1.Op != OpZeroExt16to32 || x != mul_1.Args[0] {
7646						continue
7647					}
7648					v_1_1_0_1 := v_1_1_0.Args[1]
7649					if v_1_1_0_1.Op != OpConst64 {
7650						continue
7651					}
7652					s := auxIntToInt64(v_1_1_0_1.AuxInt)
7653					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<15+umagic16(c).m/2) && s == 16+umagic16(c).s-1 && x.Op != OpConst16 && udivisibleOK16(c)) {
7654						continue
7655					}
7656					v.reset(OpLeq16U)
7657					v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
7658					v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
7659					v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7660					v2.AuxInt = int16ToAuxInt(int16(udivisible16(c).m))
7661					v1.AddArg2(v2, x)
7662					v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7663					v3.AuxInt = int16ToAuxInt(int16(16 - udivisible16(c).k))
7664					v0.AddArg2(v1, v3)
7665					v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7666					v4.AuxInt = int16ToAuxInt(int16(udivisible16(c).max))
7667					v.AddArg2(v0, v4)
7668					return true
7669				}
7670			}
7671		}
7672		break
7673	}
7674	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1]))) (Const64 [s]))) ) )
7675	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<15+(umagic16(c).m+1)/2) && s == 16+umagic16(c).s-2 && x.Op != OpConst16 && udivisibleOK16(c)
7676	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(udivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(16-udivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(udivisible16(c).max)]) )
7677	for {
7678		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7679			x := v_0
7680			if v_1.Op != OpMul16 {
7681				continue
7682			}
7683			_ = v_1.Args[1]
7684			v_1_0 := v_1.Args[0]
7685			v_1_1 := v_1.Args[1]
7686			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
7687				if v_1_0.Op != OpConst16 {
7688					continue
7689				}
7690				c := auxIntToInt16(v_1_0.AuxInt)
7691				if v_1_1.Op != OpTrunc32to16 {
7692					continue
7693				}
7694				v_1_1_0 := v_1_1.Args[0]
7695				if v_1_1_0.Op != OpRsh32Ux64 {
7696					continue
7697				}
7698				_ = v_1_1_0.Args[1]
7699				mul := v_1_1_0.Args[0]
7700				if mul.Op != OpMul32 {
7701					continue
7702				}
7703				_ = mul.Args[1]
7704				mul_0 := mul.Args[0]
7705				mul_1 := mul.Args[1]
7706				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
7707					if mul_0.Op != OpConst32 {
7708						continue
7709					}
7710					m := auxIntToInt32(mul_0.AuxInt)
7711					if mul_1.Op != OpRsh32Ux64 {
7712						continue
7713					}
7714					_ = mul_1.Args[1]
7715					mul_1_0 := mul_1.Args[0]
7716					if mul_1_0.Op != OpZeroExt16to32 || x != mul_1_0.Args[0] {
7717						continue
7718					}
7719					mul_1_1 := mul_1.Args[1]
7720					if mul_1_1.Op != OpConst64 || auxIntToInt64(mul_1_1.AuxInt) != 1 {
7721						continue
7722					}
7723					v_1_1_0_1 := v_1_1_0.Args[1]
7724					if v_1_1_0_1.Op != OpConst64 {
7725						continue
7726					}
7727					s := auxIntToInt64(v_1_1_0_1.AuxInt)
7728					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<15+(umagic16(c).m+1)/2) && s == 16+umagic16(c).s-2 && x.Op != OpConst16 && udivisibleOK16(c)) {
7729						continue
7730					}
7731					v.reset(OpLeq16U)
7732					v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
7733					v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
7734					v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7735					v2.AuxInt = int16ToAuxInt(int16(udivisible16(c).m))
7736					v1.AddArg2(v2, x)
7737					v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7738					v3.AuxInt = int16ToAuxInt(int16(16 - udivisible16(c).k))
7739					v0.AddArg2(v1, v3)
7740					v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7741					v4.AuxInt = int16ToAuxInt(int16(udivisible16(c).max))
7742					v.AddArg2(v0, v4)
7743					return true
7744				}
7745			}
7746		}
7747		break
7748	}
7749	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x))) (Const64 [s]))) ) )
7750	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(umagic16(c).m) && s == 16+umagic16(c).s-1 && x.Op != OpConst16 && udivisibleOK16(c)
7751	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(udivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(16-udivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(udivisible16(c).max)]) )
7752	for {
7753		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7754			x := v_0
7755			if v_1.Op != OpMul16 {
7756				continue
7757			}
7758			_ = v_1.Args[1]
7759			v_1_0 := v_1.Args[0]
7760			v_1_1 := v_1.Args[1]
7761			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
7762				if v_1_0.Op != OpConst16 {
7763					continue
7764				}
7765				c := auxIntToInt16(v_1_0.AuxInt)
7766				if v_1_1.Op != OpTrunc32to16 {
7767					continue
7768				}
7769				v_1_1_0 := v_1_1.Args[0]
7770				if v_1_1_0.Op != OpRsh32Ux64 {
7771					continue
7772				}
7773				_ = v_1_1_0.Args[1]
7774				v_1_1_0_0 := v_1_1_0.Args[0]
7775				if v_1_1_0_0.Op != OpAvg32u {
7776					continue
7777				}
7778				_ = v_1_1_0_0.Args[1]
7779				v_1_1_0_0_0 := v_1_1_0_0.Args[0]
7780				if v_1_1_0_0_0.Op != OpLsh32x64 {
7781					continue
7782				}
7783				_ = v_1_1_0_0_0.Args[1]
7784				v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
7785				if v_1_1_0_0_0_0.Op != OpZeroExt16to32 || x != v_1_1_0_0_0_0.Args[0] {
7786					continue
7787				}
7788				v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
7789				if v_1_1_0_0_0_1.Op != OpConst64 || auxIntToInt64(v_1_1_0_0_0_1.AuxInt) != 16 {
7790					continue
7791				}
7792				mul := v_1_1_0_0.Args[1]
7793				if mul.Op != OpMul32 {
7794					continue
7795				}
7796				_ = mul.Args[1]
7797				mul_0 := mul.Args[0]
7798				mul_1 := mul.Args[1]
7799				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
7800					if mul_0.Op != OpConst32 {
7801						continue
7802					}
7803					m := auxIntToInt32(mul_0.AuxInt)
7804					if mul_1.Op != OpZeroExt16to32 || x != mul_1.Args[0] {
7805						continue
7806					}
7807					v_1_1_0_1 := v_1_1_0.Args[1]
7808					if v_1_1_0_1.Op != OpConst64 {
7809						continue
7810					}
7811					s := auxIntToInt64(v_1_1_0_1.AuxInt)
7812					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(umagic16(c).m) && s == 16+umagic16(c).s-1 && x.Op != OpConst16 && udivisibleOK16(c)) {
7813						continue
7814					}
7815					v.reset(OpLeq16U)
7816					v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
7817					v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
7818					v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7819					v2.AuxInt = int16ToAuxInt(int16(udivisible16(c).m))
7820					v1.AddArg2(v2, x)
7821					v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7822					v3.AuxInt = int16ToAuxInt(int16(16 - udivisible16(c).k))
7823					v0.AddArg2(v1, v3)
7824					v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7825					v4.AuxInt = int16ToAuxInt(int16(udivisible16(c).max))
7826					v.AddArg2(v0, v4)
7827					return true
7828				}
7829			}
7830		}
7831		break
7832	}
7833	// match: (Eq16 x (Mul16 (Const16 [c]) (Sub16 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt16to32 x)) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31]))) ) )
7834	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic16(c).m) && s == 16+smagic16(c).s && x.Op != OpConst16 && sdivisibleOK16(c)
7835	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(sdivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(sdivisible16(c).a)]) ) (Const16 <typ.UInt16> [int16(16-sdivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(sdivisible16(c).max)]) )
7836	for {
7837		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7838			x := v_0
7839			if v_1.Op != OpMul16 {
7840				continue
7841			}
7842			_ = v_1.Args[1]
7843			v_1_0 := v_1.Args[0]
7844			v_1_1 := v_1.Args[1]
7845			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
7846				if v_1_0.Op != OpConst16 {
7847					continue
7848				}
7849				c := auxIntToInt16(v_1_0.AuxInt)
7850				if v_1_1.Op != OpSub16 {
7851					continue
7852				}
7853				_ = v_1_1.Args[1]
7854				v_1_1_0 := v_1_1.Args[0]
7855				if v_1_1_0.Op != OpRsh32x64 {
7856					continue
7857				}
7858				_ = v_1_1_0.Args[1]
7859				mul := v_1_1_0.Args[0]
7860				if mul.Op != OpMul32 {
7861					continue
7862				}
7863				_ = mul.Args[1]
7864				mul_0 := mul.Args[0]
7865				mul_1 := mul.Args[1]
7866				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
7867					if mul_0.Op != OpConst32 {
7868						continue
7869					}
7870					m := auxIntToInt32(mul_0.AuxInt)
7871					if mul_1.Op != OpSignExt16to32 || x != mul_1.Args[0] {
7872						continue
7873					}
7874					v_1_1_0_1 := v_1_1_0.Args[1]
7875					if v_1_1_0_1.Op != OpConst64 {
7876						continue
7877					}
7878					s := auxIntToInt64(v_1_1_0_1.AuxInt)
7879					v_1_1_1 := v_1_1.Args[1]
7880					if v_1_1_1.Op != OpRsh32x64 {
7881						continue
7882					}
7883					_ = v_1_1_1.Args[1]
7884					v_1_1_1_0 := v_1_1_1.Args[0]
7885					if v_1_1_1_0.Op != OpSignExt16to32 || x != v_1_1_1_0.Args[0] {
7886						continue
7887					}
7888					v_1_1_1_1 := v_1_1_1.Args[1]
7889					if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 31 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic16(c).m) && s == 16+smagic16(c).s && x.Op != OpConst16 && sdivisibleOK16(c)) {
7890						continue
7891					}
7892					v.reset(OpLeq16U)
7893					v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
7894					v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
7895					v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
7896					v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7897					v3.AuxInt = int16ToAuxInt(int16(sdivisible16(c).m))
7898					v2.AddArg2(v3, x)
7899					v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7900					v4.AuxInt = int16ToAuxInt(int16(sdivisible16(c).a))
7901					v1.AddArg2(v2, v4)
7902					v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7903					v5.AuxInt = int16ToAuxInt(int16(16 - sdivisible16(c).k))
7904					v0.AddArg2(v1, v5)
7905					v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7906					v6.AuxInt = int16ToAuxInt(int16(sdivisible16(c).max))
7907					v.AddArg2(v0, v6)
7908					return true
7909				}
7910			}
7911		}
7912		break
7913	}
7914	// match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
7915	// cond: k > 0 && k < 15 && kbar == 16 - k
7916	// result: (Eq16 (And16 <t> n (Const16 <t> [1<<uint(k)-1])) (Const16 <t> [0]))
7917	for {
7918		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7919			n := v_0
7920			if v_1.Op != OpLsh16x64 {
7921				continue
7922			}
7923			_ = v_1.Args[1]
7924			v_1_0 := v_1.Args[0]
7925			if v_1_0.Op != OpRsh16x64 {
7926				continue
7927			}
7928			_ = v_1_0.Args[1]
7929			v_1_0_0 := v_1_0.Args[0]
7930			if v_1_0_0.Op != OpAdd16 {
7931				continue
7932			}
7933			t := v_1_0_0.Type
7934			_ = v_1_0_0.Args[1]
7935			v_1_0_0_0 := v_1_0_0.Args[0]
7936			v_1_0_0_1 := v_1_0_0.Args[1]
7937			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
7938				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh16Ux64 || v_1_0_0_1.Type != t {
7939					continue
7940				}
7941				_ = v_1_0_0_1.Args[1]
7942				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
7943				if v_1_0_0_1_0.Op != OpRsh16x64 || v_1_0_0_1_0.Type != t {
7944					continue
7945				}
7946				_ = v_1_0_0_1_0.Args[1]
7947				if n != v_1_0_0_1_0.Args[0] {
7948					continue
7949				}
7950				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
7951				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 15 {
7952					continue
7953				}
7954				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
7955				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
7956					continue
7957				}
7958				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
7959				v_1_0_1 := v_1_0.Args[1]
7960				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
7961					continue
7962				}
7963				k := auxIntToInt64(v_1_0_1.AuxInt)
7964				v_1_1 := v_1.Args[1]
7965				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 15 && kbar == 16-k) {
7966					continue
7967				}
7968				v.reset(OpEq16)
7969				v0 := b.NewValue0(v.Pos, OpAnd16, t)
7970				v1 := b.NewValue0(v.Pos, OpConst16, t)
7971				v1.AuxInt = int16ToAuxInt(1<<uint(k) - 1)
7972				v0.AddArg2(n, v1)
7973				v2 := b.NewValue0(v.Pos, OpConst16, t)
7974				v2.AuxInt = int16ToAuxInt(0)
7975				v.AddArg2(v0, v2)
7976				return true
7977			}
7978		}
7979		break
7980	}
7981	// match: (Eq16 s:(Sub16 x y) (Const16 [0]))
7982	// cond: s.Uses == 1
7983	// result: (Eq16 x y)
7984	for {
7985		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7986			s := v_0
7987			if s.Op != OpSub16 {
7988				continue
7989			}
7990			y := s.Args[1]
7991			x := s.Args[0]
7992			if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 || !(s.Uses == 1) {
7993				continue
7994			}
7995			v.reset(OpEq16)
7996			v.AddArg2(x, y)
7997			return true
7998		}
7999		break
8000	}
8001	// match: (Eq16 (And16 <t> x (Const16 <t> [y])) (Const16 <t> [y]))
8002	// cond: oneBit16(y)
8003	// result: (Neq16 (And16 <t> x (Const16 <t> [y])) (Const16 <t> [0]))
8004	for {
8005		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8006			if v_0.Op != OpAnd16 {
8007				continue
8008			}
8009			t := v_0.Type
8010			_ = v_0.Args[1]
8011			v_0_0 := v_0.Args[0]
8012			v_0_1 := v_0.Args[1]
8013			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
8014				x := v_0_0
8015				if v_0_1.Op != OpConst16 || v_0_1.Type != t {
8016					continue
8017				}
8018				y := auxIntToInt16(v_0_1.AuxInt)
8019				if v_1.Op != OpConst16 || v_1.Type != t || auxIntToInt16(v_1.AuxInt) != y || !(oneBit16(y)) {
8020					continue
8021				}
8022				v.reset(OpNeq16)
8023				v0 := b.NewValue0(v.Pos, OpAnd16, t)
8024				v1 := b.NewValue0(v.Pos, OpConst16, t)
8025				v1.AuxInt = int16ToAuxInt(y)
8026				v0.AddArg2(x, v1)
8027				v2 := b.NewValue0(v.Pos, OpConst16, t)
8028				v2.AuxInt = int16ToAuxInt(0)
8029				v.AddArg2(v0, v2)
8030				return true
8031			}
8032		}
8033		break
8034	}
8035	return false
8036}
8037func rewriteValuegeneric_OpEq32(v *Value) bool {
8038	v_1 := v.Args[1]
8039	v_0 := v.Args[0]
8040	b := v.Block
8041	typ := &b.Func.Config.Types
8042	// match: (Eq32 x x)
8043	// result: (ConstBool [true])
8044	for {
8045		x := v_0
8046		if x != v_1 {
8047			break
8048		}
8049		v.reset(OpConstBool)
8050		v.AuxInt = boolToAuxInt(true)
8051		return true
8052	}
8053	// match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
8054	// result: (Eq32 (Const32 <t> [c-d]) x)
8055	for {
8056		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8057			if v_0.Op != OpConst32 {
8058				continue
8059			}
8060			t := v_0.Type
8061			c := auxIntToInt32(v_0.AuxInt)
8062			if v_1.Op != OpAdd32 {
8063				continue
8064			}
8065			_ = v_1.Args[1]
8066			v_1_0 := v_1.Args[0]
8067			v_1_1 := v_1.Args[1]
8068			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8069				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
8070					continue
8071				}
8072				d := auxIntToInt32(v_1_0.AuxInt)
8073				x := v_1_1
8074				v.reset(OpEq32)
8075				v0 := b.NewValue0(v.Pos, OpConst32, t)
8076				v0.AuxInt = int32ToAuxInt(c - d)
8077				v.AddArg2(v0, x)
8078				return true
8079			}
8080		}
8081		break
8082	}
8083	// match: (Eq32 (Const32 [c]) (Const32 [d]))
8084	// result: (ConstBool [c == d])
8085	for {
8086		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8087			if v_0.Op != OpConst32 {
8088				continue
8089			}
8090			c := auxIntToInt32(v_0.AuxInt)
8091			if v_1.Op != OpConst32 {
8092				continue
8093			}
8094			d := auxIntToInt32(v_1.AuxInt)
8095			v.reset(OpConstBool)
8096			v.AuxInt = boolToAuxInt(c == d)
8097			return true
8098		}
8099		break
8100	}
8101	// match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s])) ) )
8102	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<31+umagic32(c).m/2) && s == umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)
8103	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
8104	for {
8105		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8106			x := v_0
8107			if v_1.Op != OpMul32 {
8108				continue
8109			}
8110			_ = v_1.Args[1]
8111			v_1_0 := v_1.Args[0]
8112			v_1_1 := v_1.Args[1]
8113			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8114				if v_1_0.Op != OpConst32 {
8115					continue
8116				}
8117				c := auxIntToInt32(v_1_0.AuxInt)
8118				if v_1_1.Op != OpRsh32Ux64 {
8119					continue
8120				}
8121				_ = v_1_1.Args[1]
8122				mul := v_1_1.Args[0]
8123				if mul.Op != OpHmul32u {
8124					continue
8125				}
8126				_ = mul.Args[1]
8127				mul_0 := mul.Args[0]
8128				mul_1 := mul.Args[1]
8129				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
8130					if mul_0.Op != OpConst32 {
8131						continue
8132					}
8133					m := auxIntToInt32(mul_0.AuxInt)
8134					if x != mul_1 {
8135						continue
8136					}
8137					v_1_1_1 := v_1_1.Args[1]
8138					if v_1_1_1.Op != OpConst64 {
8139						continue
8140					}
8141					s := auxIntToInt64(v_1_1_1.AuxInt)
8142					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<31+umagic32(c).m/2) && s == umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)) {
8143						continue
8144					}
8145					v.reset(OpLeq32U)
8146					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8147					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8148					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8149					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
8150					v1.AddArg2(v2, x)
8151					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8152					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
8153					v0.AddArg2(v1, v3)
8154					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8155					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
8156					v.AddArg2(v0, v4)
8157					return true
8158				}
8159			}
8160		}
8161		break
8162	}
8163	// match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s])) ) )
8164	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<31+(umagic32(c).m+1)/2) && s == umagic32(c).s-2 && x.Op != OpConst32 && udivisibleOK32(c)
8165	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
8166	for {
8167		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8168			x := v_0
8169			if v_1.Op != OpMul32 {
8170				continue
8171			}
8172			_ = v_1.Args[1]
8173			v_1_0 := v_1.Args[0]
8174			v_1_1 := v_1.Args[1]
8175			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8176				if v_1_0.Op != OpConst32 {
8177					continue
8178				}
8179				c := auxIntToInt32(v_1_0.AuxInt)
8180				if v_1_1.Op != OpRsh32Ux64 {
8181					continue
8182				}
8183				_ = v_1_1.Args[1]
8184				mul := v_1_1.Args[0]
8185				if mul.Op != OpHmul32u {
8186					continue
8187				}
8188				_ = mul.Args[1]
8189				mul_0 := mul.Args[0]
8190				mul_1 := mul.Args[1]
8191				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
8192					if mul_0.Op != OpConst32 || mul_0.Type != typ.UInt32 {
8193						continue
8194					}
8195					m := auxIntToInt32(mul_0.AuxInt)
8196					if mul_1.Op != OpRsh32Ux64 {
8197						continue
8198					}
8199					_ = mul_1.Args[1]
8200					if x != mul_1.Args[0] {
8201						continue
8202					}
8203					mul_1_1 := mul_1.Args[1]
8204					if mul_1_1.Op != OpConst64 || auxIntToInt64(mul_1_1.AuxInt) != 1 {
8205						continue
8206					}
8207					v_1_1_1 := v_1_1.Args[1]
8208					if v_1_1_1.Op != OpConst64 {
8209						continue
8210					}
8211					s := auxIntToInt64(v_1_1_1.AuxInt)
8212					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<31+(umagic32(c).m+1)/2) && s == umagic32(c).s-2 && x.Op != OpConst32 && udivisibleOK32(c)) {
8213						continue
8214					}
8215					v.reset(OpLeq32U)
8216					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8217					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8218					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8219					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
8220					v1.AddArg2(v2, x)
8221					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8222					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
8223					v0.AddArg2(v1, v3)
8224					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8225					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
8226					v.AddArg2(v0, v4)
8227					return true
8228				}
8229			}
8230		}
8231		break
8232	}
8233	// match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s])) ) )
8234	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(umagic32(c).m) && s == umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)
8235	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
8236	for {
8237		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8238			x := v_0
8239			if v_1.Op != OpMul32 {
8240				continue
8241			}
8242			_ = v_1.Args[1]
8243			v_1_0 := v_1.Args[0]
8244			v_1_1 := v_1.Args[1]
8245			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8246				if v_1_0.Op != OpConst32 {
8247					continue
8248				}
8249				c := auxIntToInt32(v_1_0.AuxInt)
8250				if v_1_1.Op != OpRsh32Ux64 {
8251					continue
8252				}
8253				_ = v_1_1.Args[1]
8254				v_1_1_0 := v_1_1.Args[0]
8255				if v_1_1_0.Op != OpAvg32u {
8256					continue
8257				}
8258				_ = v_1_1_0.Args[1]
8259				if x != v_1_1_0.Args[0] {
8260					continue
8261				}
8262				mul := v_1_1_0.Args[1]
8263				if mul.Op != OpHmul32u {
8264					continue
8265				}
8266				_ = mul.Args[1]
8267				mul_0 := mul.Args[0]
8268				mul_1 := mul.Args[1]
8269				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
8270					if mul_0.Op != OpConst32 {
8271						continue
8272					}
8273					m := auxIntToInt32(mul_0.AuxInt)
8274					if x != mul_1 {
8275						continue
8276					}
8277					v_1_1_1 := v_1_1.Args[1]
8278					if v_1_1_1.Op != OpConst64 {
8279						continue
8280					}
8281					s := auxIntToInt64(v_1_1_1.AuxInt)
8282					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(umagic32(c).m) && s == umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)) {
8283						continue
8284					}
8285					v.reset(OpLeq32U)
8286					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8287					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8288					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8289					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
8290					v1.AddArg2(v2, x)
8291					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8292					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
8293					v0.AddArg2(v1, v3)
8294					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8295					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
8296					v.AddArg2(v0, v4)
8297					return true
8298				}
8299			}
8300		}
8301		break
8302	}
8303	// match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s]))) ) )
8304	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic32(c).m/2) && s == 32+umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)
8305	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
8306	for {
8307		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8308			x := v_0
8309			if v_1.Op != OpMul32 {
8310				continue
8311			}
8312			_ = v_1.Args[1]
8313			v_1_0 := v_1.Args[0]
8314			v_1_1 := v_1.Args[1]
8315			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8316				if v_1_0.Op != OpConst32 {
8317					continue
8318				}
8319				c := auxIntToInt32(v_1_0.AuxInt)
8320				if v_1_1.Op != OpTrunc64to32 {
8321					continue
8322				}
8323				v_1_1_0 := v_1_1.Args[0]
8324				if v_1_1_0.Op != OpRsh64Ux64 {
8325					continue
8326				}
8327				_ = v_1_1_0.Args[1]
8328				mul := v_1_1_0.Args[0]
8329				if mul.Op != OpMul64 {
8330					continue
8331				}
8332				_ = mul.Args[1]
8333				mul_0 := mul.Args[0]
8334				mul_1 := mul.Args[1]
8335				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
8336					if mul_0.Op != OpConst64 {
8337						continue
8338					}
8339					m := auxIntToInt64(mul_0.AuxInt)
8340					if mul_1.Op != OpZeroExt32to64 || x != mul_1.Args[0] {
8341						continue
8342					}
8343					v_1_1_0_1 := v_1_1_0.Args[1]
8344					if v_1_1_0_1.Op != OpConst64 {
8345						continue
8346					}
8347					s := auxIntToInt64(v_1_1_0_1.AuxInt)
8348					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic32(c).m/2) && s == 32+umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)) {
8349						continue
8350					}
8351					v.reset(OpLeq32U)
8352					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8353					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8354					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8355					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
8356					v1.AddArg2(v2, x)
8357					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8358					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
8359					v0.AddArg2(v1, v3)
8360					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8361					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
8362					v.AddArg2(v0, v4)
8363					return true
8364				}
8365			}
8366		}
8367		break
8368	}
8369	// match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s]))) ) )
8370	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic32(c).m+1)/2) && s == 32+umagic32(c).s-2 && x.Op != OpConst32 && udivisibleOK32(c)
8371	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
8372	for {
8373		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8374			x := v_0
8375			if v_1.Op != OpMul32 {
8376				continue
8377			}
8378			_ = v_1.Args[1]
8379			v_1_0 := v_1.Args[0]
8380			v_1_1 := v_1.Args[1]
8381			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8382				if v_1_0.Op != OpConst32 {
8383					continue
8384				}
8385				c := auxIntToInt32(v_1_0.AuxInt)
8386				if v_1_1.Op != OpTrunc64to32 {
8387					continue
8388				}
8389				v_1_1_0 := v_1_1.Args[0]
8390				if v_1_1_0.Op != OpRsh64Ux64 {
8391					continue
8392				}
8393				_ = v_1_1_0.Args[1]
8394				mul := v_1_1_0.Args[0]
8395				if mul.Op != OpMul64 {
8396					continue
8397				}
8398				_ = mul.Args[1]
8399				mul_0 := mul.Args[0]
8400				mul_1 := mul.Args[1]
8401				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
8402					if mul_0.Op != OpConst64 {
8403						continue
8404					}
8405					m := auxIntToInt64(mul_0.AuxInt)
8406					if mul_1.Op != OpRsh64Ux64 {
8407						continue
8408					}
8409					_ = mul_1.Args[1]
8410					mul_1_0 := mul_1.Args[0]
8411					if mul_1_0.Op != OpZeroExt32to64 || x != mul_1_0.Args[0] {
8412						continue
8413					}
8414					mul_1_1 := mul_1.Args[1]
8415					if mul_1_1.Op != OpConst64 || auxIntToInt64(mul_1_1.AuxInt) != 1 {
8416						continue
8417					}
8418					v_1_1_0_1 := v_1_1_0.Args[1]
8419					if v_1_1_0_1.Op != OpConst64 {
8420						continue
8421					}
8422					s := auxIntToInt64(v_1_1_0_1.AuxInt)
8423					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic32(c).m+1)/2) && s == 32+umagic32(c).s-2 && x.Op != OpConst32 && udivisibleOK32(c)) {
8424						continue
8425					}
8426					v.reset(OpLeq32U)
8427					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8428					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8429					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8430					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
8431					v1.AddArg2(v2, x)
8432					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8433					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
8434					v0.AddArg2(v1, v3)
8435					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8436					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
8437					v.AddArg2(v0, v4)
8438					return true
8439				}
8440			}
8441		}
8442		break
8443	}
8444	// match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s]))) ) )
8445	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic32(c).m) && s == 32+umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)
8446	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
8447	for {
8448		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8449			x := v_0
8450			if v_1.Op != OpMul32 {
8451				continue
8452			}
8453			_ = v_1.Args[1]
8454			v_1_0 := v_1.Args[0]
8455			v_1_1 := v_1.Args[1]
8456			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8457				if v_1_0.Op != OpConst32 {
8458					continue
8459				}
8460				c := auxIntToInt32(v_1_0.AuxInt)
8461				if v_1_1.Op != OpTrunc64to32 {
8462					continue
8463				}
8464				v_1_1_0 := v_1_1.Args[0]
8465				if v_1_1_0.Op != OpRsh64Ux64 {
8466					continue
8467				}
8468				_ = v_1_1_0.Args[1]
8469				v_1_1_0_0 := v_1_1_0.Args[0]
8470				if v_1_1_0_0.Op != OpAvg64u {
8471					continue
8472				}
8473				_ = v_1_1_0_0.Args[1]
8474				v_1_1_0_0_0 := v_1_1_0_0.Args[0]
8475				if v_1_1_0_0_0.Op != OpLsh64x64 {
8476					continue
8477				}
8478				_ = v_1_1_0_0_0.Args[1]
8479				v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
8480				if v_1_1_0_0_0_0.Op != OpZeroExt32to64 || x != v_1_1_0_0_0_0.Args[0] {
8481					continue
8482				}
8483				v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
8484				if v_1_1_0_0_0_1.Op != OpConst64 || auxIntToInt64(v_1_1_0_0_0_1.AuxInt) != 32 {
8485					continue
8486				}
8487				mul := v_1_1_0_0.Args[1]
8488				if mul.Op != OpMul64 {
8489					continue
8490				}
8491				_ = mul.Args[1]
8492				mul_0 := mul.Args[0]
8493				mul_1 := mul.Args[1]
8494				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
8495					if mul_0.Op != OpConst64 {
8496						continue
8497					}
8498					m := auxIntToInt64(mul_0.AuxInt)
8499					if mul_1.Op != OpZeroExt32to64 || x != mul_1.Args[0] {
8500						continue
8501					}
8502					v_1_1_0_1 := v_1_1_0.Args[1]
8503					if v_1_1_0_1.Op != OpConst64 {
8504						continue
8505					}
8506					s := auxIntToInt64(v_1_1_0_1.AuxInt)
8507					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic32(c).m) && s == 32+umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)) {
8508						continue
8509					}
8510					v.reset(OpLeq32U)
8511					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8512					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8513					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8514					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
8515					v1.AddArg2(v2, x)
8516					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8517					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
8518					v0.AddArg2(v1, v3)
8519					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8520					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
8521					v.AddArg2(v0, v4)
8522					return true
8523				}
8524			}
8525		}
8526		break
8527	}
8528	// match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh64x64 mul:(Mul64 (Const64 [m]) (SignExt32to64 x)) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63]))) ) )
8529	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic32(c).m) && s == 32+smagic32(c).s && x.Op != OpConst32 && sdivisibleOK32(c)
8530	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(sdivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(sdivisible32(c).a)]) ) (Const32 <typ.UInt32> [int32(32-sdivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(sdivisible32(c).max)]) )
8531	for {
8532		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8533			x := v_0
8534			if v_1.Op != OpMul32 {
8535				continue
8536			}
8537			_ = v_1.Args[1]
8538			v_1_0 := v_1.Args[0]
8539			v_1_1 := v_1.Args[1]
8540			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8541				if v_1_0.Op != OpConst32 {
8542					continue
8543				}
8544				c := auxIntToInt32(v_1_0.AuxInt)
8545				if v_1_1.Op != OpSub32 {
8546					continue
8547				}
8548				_ = v_1_1.Args[1]
8549				v_1_1_0 := v_1_1.Args[0]
8550				if v_1_1_0.Op != OpRsh64x64 {
8551					continue
8552				}
8553				_ = v_1_1_0.Args[1]
8554				mul := v_1_1_0.Args[0]
8555				if mul.Op != OpMul64 {
8556					continue
8557				}
8558				_ = mul.Args[1]
8559				mul_0 := mul.Args[0]
8560				mul_1 := mul.Args[1]
8561				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
8562					if mul_0.Op != OpConst64 {
8563						continue
8564					}
8565					m := auxIntToInt64(mul_0.AuxInt)
8566					if mul_1.Op != OpSignExt32to64 || x != mul_1.Args[0] {
8567						continue
8568					}
8569					v_1_1_0_1 := v_1_1_0.Args[1]
8570					if v_1_1_0_1.Op != OpConst64 {
8571						continue
8572					}
8573					s := auxIntToInt64(v_1_1_0_1.AuxInt)
8574					v_1_1_1 := v_1_1.Args[1]
8575					if v_1_1_1.Op != OpRsh64x64 {
8576						continue
8577					}
8578					_ = v_1_1_1.Args[1]
8579					v_1_1_1_0 := v_1_1_1.Args[0]
8580					if v_1_1_1_0.Op != OpSignExt32to64 || x != v_1_1_1_0.Args[0] {
8581						continue
8582					}
8583					v_1_1_1_1 := v_1_1_1.Args[1]
8584					if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 63 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic32(c).m) && s == 32+smagic32(c).s && x.Op != OpConst32 && sdivisibleOK32(c)) {
8585						continue
8586					}
8587					v.reset(OpLeq32U)
8588					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8589					v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
8590					v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8591					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8592					v3.AuxInt = int32ToAuxInt(int32(sdivisible32(c).m))
8593					v2.AddArg2(v3, x)
8594					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8595					v4.AuxInt = int32ToAuxInt(int32(sdivisible32(c).a))
8596					v1.AddArg2(v2, v4)
8597					v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8598					v5.AuxInt = int32ToAuxInt(int32(32 - sdivisible32(c).k))
8599					v0.AddArg2(v1, v5)
8600					v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8601					v6.AuxInt = int32ToAuxInt(int32(sdivisible32(c).max))
8602					v.AddArg2(v0, v6)
8603					return true
8604				}
8605			}
8606		}
8607		break
8608	}
8609	// match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 mul:(Hmul32 (Const32 [m]) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) ) )
8610	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic32(c).m/2) && s == smagic32(c).s-1 && x.Op != OpConst32 && sdivisibleOK32(c)
8611	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(sdivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(sdivisible32(c).a)]) ) (Const32 <typ.UInt32> [int32(32-sdivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(sdivisible32(c).max)]) )
8612	for {
8613		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8614			x := v_0
8615			if v_1.Op != OpMul32 {
8616				continue
8617			}
8618			_ = v_1.Args[1]
8619			v_1_0 := v_1.Args[0]
8620			v_1_1 := v_1.Args[1]
8621			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8622				if v_1_0.Op != OpConst32 {
8623					continue
8624				}
8625				c := auxIntToInt32(v_1_0.AuxInt)
8626				if v_1_1.Op != OpSub32 {
8627					continue
8628				}
8629				_ = v_1_1.Args[1]
8630				v_1_1_0 := v_1_1.Args[0]
8631				if v_1_1_0.Op != OpRsh32x64 {
8632					continue
8633				}
8634				_ = v_1_1_0.Args[1]
8635				mul := v_1_1_0.Args[0]
8636				if mul.Op != OpHmul32 {
8637					continue
8638				}
8639				_ = mul.Args[1]
8640				mul_0 := mul.Args[0]
8641				mul_1 := mul.Args[1]
8642				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
8643					if mul_0.Op != OpConst32 {
8644						continue
8645					}
8646					m := auxIntToInt32(mul_0.AuxInt)
8647					if x != mul_1 {
8648						continue
8649					}
8650					v_1_1_0_1 := v_1_1_0.Args[1]
8651					if v_1_1_0_1.Op != OpConst64 {
8652						continue
8653					}
8654					s := auxIntToInt64(v_1_1_0_1.AuxInt)
8655					v_1_1_1 := v_1_1.Args[1]
8656					if v_1_1_1.Op != OpRsh32x64 {
8657						continue
8658					}
8659					_ = v_1_1_1.Args[1]
8660					if x != v_1_1_1.Args[0] {
8661						continue
8662					}
8663					v_1_1_1_1 := v_1_1_1.Args[1]
8664					if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 31 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic32(c).m/2) && s == smagic32(c).s-1 && x.Op != OpConst32 && sdivisibleOK32(c)) {
8665						continue
8666					}
8667					v.reset(OpLeq32U)
8668					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8669					v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
8670					v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8671					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8672					v3.AuxInt = int32ToAuxInt(int32(sdivisible32(c).m))
8673					v2.AddArg2(v3, x)
8674					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8675					v4.AuxInt = int32ToAuxInt(int32(sdivisible32(c).a))
8676					v1.AddArg2(v2, v4)
8677					v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8678					v5.AuxInt = int32ToAuxInt(int32(32 - sdivisible32(c).k))
8679					v0.AddArg2(v1, v5)
8680					v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8681					v6.AuxInt = int32ToAuxInt(int32(sdivisible32(c).max))
8682					v.AddArg2(v0, v6)
8683					return true
8684				}
8685			}
8686		}
8687		break
8688	}
8689	// match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 (Const32 [m]) x) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) ) )
8690	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic32(c).m) && s == smagic32(c).s && x.Op != OpConst32 && sdivisibleOK32(c)
8691	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(sdivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(sdivisible32(c).a)]) ) (Const32 <typ.UInt32> [int32(32-sdivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(sdivisible32(c).max)]) )
8692	for {
8693		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8694			x := v_0
8695			if v_1.Op != OpMul32 {
8696				continue
8697			}
8698			_ = v_1.Args[1]
8699			v_1_0 := v_1.Args[0]
8700			v_1_1 := v_1.Args[1]
8701			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8702				if v_1_0.Op != OpConst32 {
8703					continue
8704				}
8705				c := auxIntToInt32(v_1_0.AuxInt)
8706				if v_1_1.Op != OpSub32 {
8707					continue
8708				}
8709				_ = v_1_1.Args[1]
8710				v_1_1_0 := v_1_1.Args[0]
8711				if v_1_1_0.Op != OpRsh32x64 {
8712					continue
8713				}
8714				_ = v_1_1_0.Args[1]
8715				v_1_1_0_0 := v_1_1_0.Args[0]
8716				if v_1_1_0_0.Op != OpAdd32 {
8717					continue
8718				}
8719				_ = v_1_1_0_0.Args[1]
8720				v_1_1_0_0_0 := v_1_1_0_0.Args[0]
8721				v_1_1_0_0_1 := v_1_1_0_0.Args[1]
8722				for _i2 := 0; _i2 <= 1; _i2, v_1_1_0_0_0, v_1_1_0_0_1 = _i2+1, v_1_1_0_0_1, v_1_1_0_0_0 {
8723					mul := v_1_1_0_0_0
8724					if mul.Op != OpHmul32 {
8725						continue
8726					}
8727					_ = mul.Args[1]
8728					mul_0 := mul.Args[0]
8729					mul_1 := mul.Args[1]
8730					for _i3 := 0; _i3 <= 1; _i3, mul_0, mul_1 = _i3+1, mul_1, mul_0 {
8731						if mul_0.Op != OpConst32 {
8732							continue
8733						}
8734						m := auxIntToInt32(mul_0.AuxInt)
8735						if x != mul_1 || x != v_1_1_0_0_1 {
8736							continue
8737						}
8738						v_1_1_0_1 := v_1_1_0.Args[1]
8739						if v_1_1_0_1.Op != OpConst64 {
8740							continue
8741						}
8742						s := auxIntToInt64(v_1_1_0_1.AuxInt)
8743						v_1_1_1 := v_1_1.Args[1]
8744						if v_1_1_1.Op != OpRsh32x64 {
8745							continue
8746						}
8747						_ = v_1_1_1.Args[1]
8748						if x != v_1_1_1.Args[0] {
8749							continue
8750						}
8751						v_1_1_1_1 := v_1_1_1.Args[1]
8752						if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 31 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic32(c).m) && s == smagic32(c).s && x.Op != OpConst32 && sdivisibleOK32(c)) {
8753							continue
8754						}
8755						v.reset(OpLeq32U)
8756						v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8757						v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
8758						v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8759						v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8760						v3.AuxInt = int32ToAuxInt(int32(sdivisible32(c).m))
8761						v2.AddArg2(v3, x)
8762						v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8763						v4.AuxInt = int32ToAuxInt(int32(sdivisible32(c).a))
8764						v1.AddArg2(v2, v4)
8765						v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8766						v5.AuxInt = int32ToAuxInt(int32(32 - sdivisible32(c).k))
8767						v0.AddArg2(v1, v5)
8768						v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8769						v6.AuxInt = int32ToAuxInt(int32(sdivisible32(c).max))
8770						v.AddArg2(v0, v6)
8771						return true
8772					}
8773				}
8774			}
8775		}
8776		break
8777	}
8778	// match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
8779	// cond: k > 0 && k < 31 && kbar == 32 - k
8780	// result: (Eq32 (And32 <t> n (Const32 <t> [1<<uint(k)-1])) (Const32 <t> [0]))
8781	for {
8782		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8783			n := v_0
8784			if v_1.Op != OpLsh32x64 {
8785				continue
8786			}
8787			_ = v_1.Args[1]
8788			v_1_0 := v_1.Args[0]
8789			if v_1_0.Op != OpRsh32x64 {
8790				continue
8791			}
8792			_ = v_1_0.Args[1]
8793			v_1_0_0 := v_1_0.Args[0]
8794			if v_1_0_0.Op != OpAdd32 {
8795				continue
8796			}
8797			t := v_1_0_0.Type
8798			_ = v_1_0_0.Args[1]
8799			v_1_0_0_0 := v_1_0_0.Args[0]
8800			v_1_0_0_1 := v_1_0_0.Args[1]
8801			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
8802				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh32Ux64 || v_1_0_0_1.Type != t {
8803					continue
8804				}
8805				_ = v_1_0_0_1.Args[1]
8806				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
8807				if v_1_0_0_1_0.Op != OpRsh32x64 || v_1_0_0_1_0.Type != t {
8808					continue
8809				}
8810				_ = v_1_0_0_1_0.Args[1]
8811				if n != v_1_0_0_1_0.Args[0] {
8812					continue
8813				}
8814				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
8815				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 31 {
8816					continue
8817				}
8818				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
8819				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
8820					continue
8821				}
8822				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
8823				v_1_0_1 := v_1_0.Args[1]
8824				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
8825					continue
8826				}
8827				k := auxIntToInt64(v_1_0_1.AuxInt)
8828				v_1_1 := v_1.Args[1]
8829				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 31 && kbar == 32-k) {
8830					continue
8831				}
8832				v.reset(OpEq32)
8833				v0 := b.NewValue0(v.Pos, OpAnd32, t)
8834				v1 := b.NewValue0(v.Pos, OpConst32, t)
8835				v1.AuxInt = int32ToAuxInt(1<<uint(k) - 1)
8836				v0.AddArg2(n, v1)
8837				v2 := b.NewValue0(v.Pos, OpConst32, t)
8838				v2.AuxInt = int32ToAuxInt(0)
8839				v.AddArg2(v0, v2)
8840				return true
8841			}
8842		}
8843		break
8844	}
8845	// match: (Eq32 s:(Sub32 x y) (Const32 [0]))
8846	// cond: s.Uses == 1
8847	// result: (Eq32 x y)
8848	for {
8849		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8850			s := v_0
8851			if s.Op != OpSub32 {
8852				continue
8853			}
8854			y := s.Args[1]
8855			x := s.Args[0]
8856			if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != 0 || !(s.Uses == 1) {
8857				continue
8858			}
8859			v.reset(OpEq32)
8860			v.AddArg2(x, y)
8861			return true
8862		}
8863		break
8864	}
8865	// match: (Eq32 (And32 <t> x (Const32 <t> [y])) (Const32 <t> [y]))
8866	// cond: oneBit32(y)
8867	// result: (Neq32 (And32 <t> x (Const32 <t> [y])) (Const32 <t> [0]))
8868	for {
8869		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8870			if v_0.Op != OpAnd32 {
8871				continue
8872			}
8873			t := v_0.Type
8874			_ = v_0.Args[1]
8875			v_0_0 := v_0.Args[0]
8876			v_0_1 := v_0.Args[1]
8877			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
8878				x := v_0_0
8879				if v_0_1.Op != OpConst32 || v_0_1.Type != t {
8880					continue
8881				}
8882				y := auxIntToInt32(v_0_1.AuxInt)
8883				if v_1.Op != OpConst32 || v_1.Type != t || auxIntToInt32(v_1.AuxInt) != y || !(oneBit32(y)) {
8884					continue
8885				}
8886				v.reset(OpNeq32)
8887				v0 := b.NewValue0(v.Pos, OpAnd32, t)
8888				v1 := b.NewValue0(v.Pos, OpConst32, t)
8889				v1.AuxInt = int32ToAuxInt(y)
8890				v0.AddArg2(x, v1)
8891				v2 := b.NewValue0(v.Pos, OpConst32, t)
8892				v2.AuxInt = int32ToAuxInt(0)
8893				v.AddArg2(v0, v2)
8894				return true
8895			}
8896		}
8897		break
8898	}
8899	return false
8900}
8901func rewriteValuegeneric_OpEq32F(v *Value) bool {
8902	v_1 := v.Args[1]
8903	v_0 := v.Args[0]
8904	// match: (Eq32F (Const32F [c]) (Const32F [d]))
8905	// result: (ConstBool [c == d])
8906	for {
8907		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8908			if v_0.Op != OpConst32F {
8909				continue
8910			}
8911			c := auxIntToFloat32(v_0.AuxInt)
8912			if v_1.Op != OpConst32F {
8913				continue
8914			}
8915			d := auxIntToFloat32(v_1.AuxInt)
8916			v.reset(OpConstBool)
8917			v.AuxInt = boolToAuxInt(c == d)
8918			return true
8919		}
8920		break
8921	}
8922	return false
8923}
// rewriteValuegeneric_OpEq64 applies the generic rewrite rules for Eq64
// (64-bit integer equality) to v, reporting whether v was rewritten.
// Each rule below is preceded by its match/cond/result comment from
// _gen/generic.rules; the code is the mechanical expansion of that rule.
func rewriteValuegeneric_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq64 x x)
	// result: (ConstBool [true])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
	// result: (Eq64 (Const64 <t> [c-d]) x)
	for {
		// Eq64 is commutative: the _i0 loop swaps v_0 and v_1 so both
		// argument orders are matched (same for every _iN loop below).
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpAdd64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt64(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpEq64)
				v0 := b.NewValue0(v.Pos, OpConst64, t)
				v0.AuxInt = int64ToAuxInt(c - d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Eq64 (Const64 [c]) (Const64 [d]))
	// result: (ConstBool [c == d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(c == d)
			return true
		}
		break
	}
	// The next several rules recognize the strength-reduced form of
	// x == c*(x/c) (i.e. x%c == 0) produced by the earlier magic-number
	// division rewrites, and replace it with a cheaper
	// multiply/rotate/unsigned-compare divisibility test
	// (udivisible64/sdivisible64). They are skipped during the "opt" pass
	// so the division rewrite itself can fire first.
	// match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s])) ) )
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic64(c).m/2) && s == umagic64(c).s-1 && x.Op != OpConst64 && udivisibleOK64(c)
	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible64(c).m)]) x) (Const64 <typ.UInt64> [64-udivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(udivisible64(c).max)]) )
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMul64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst64 {
					continue
				}
				c := auxIntToInt64(v_1_0.AuxInt)
				if v_1_1.Op != OpRsh64Ux64 {
					continue
				}
				_ = v_1_1.Args[1]
				mul := v_1_1.Args[0]
				if mul.Op != OpHmul64u {
					continue
				}
				_ = mul.Args[1]
				mul_0 := mul.Args[0]
				mul_1 := mul.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
					if mul_0.Op != OpConst64 {
						continue
					}
					m := auxIntToInt64(mul_0.AuxInt)
					if x != mul_1 {
						continue
					}
					v_1_1_1 := v_1_1.Args[1]
					if v_1_1_1.Op != OpConst64 {
						continue
					}
					s := auxIntToInt64(v_1_1_1.AuxInt)
					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic64(c).m/2) && s == umagic64(c).s-1 && x.Op != OpConst64 && udivisibleOK64(c)) {
						continue
					}
					v.reset(OpLeq64U)
					v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
					v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
					v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
					v2.AuxInt = int64ToAuxInt(int64(udivisible64(c).m))
					v1.AddArg2(v2, x)
					v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
					v3.AuxInt = int64ToAuxInt(64 - udivisible64(c).k)
					v0.AddArg2(v1, v3)
					v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
					v4.AuxInt = int64ToAuxInt(int64(udivisible64(c).max))
					v.AddArg2(v0, v4)
					return true
				}
			}
		}
		break
	}
	// Variant of the rule above where the division rewrite pre-shifted x
	// right by one (used when the magic multiply would overflow).
	// match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s])) ) )
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic64(c).m+1)/2) && s == umagic64(c).s-2 && x.Op != OpConst64 && udivisibleOK64(c)
	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible64(c).m)]) x) (Const64 <typ.UInt64> [64-udivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(udivisible64(c).max)]) )
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMul64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst64 {
					continue
				}
				c := auxIntToInt64(v_1_0.AuxInt)
				if v_1_1.Op != OpRsh64Ux64 {
					continue
				}
				_ = v_1_1.Args[1]
				mul := v_1_1.Args[0]
				if mul.Op != OpHmul64u {
					continue
				}
				_ = mul.Args[1]
				mul_0 := mul.Args[0]
				mul_1 := mul.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
					if mul_0.Op != OpConst64 {
						continue
					}
					m := auxIntToInt64(mul_0.AuxInt)
					if mul_1.Op != OpRsh64Ux64 {
						continue
					}
					_ = mul_1.Args[1]
					if x != mul_1.Args[0] {
						continue
					}
					mul_1_1 := mul_1.Args[1]
					if mul_1_1.Op != OpConst64 || auxIntToInt64(mul_1_1.AuxInt) != 1 {
						continue
					}
					v_1_1_1 := v_1_1.Args[1]
					if v_1_1_1.Op != OpConst64 {
						continue
					}
					s := auxIntToInt64(v_1_1_1.AuxInt)
					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic64(c).m+1)/2) && s == umagic64(c).s-2 && x.Op != OpConst64 && udivisibleOK64(c)) {
						continue
					}
					v.reset(OpLeq64U)
					v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
					v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
					v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
					v2.AuxInt = int64ToAuxInt(int64(udivisible64(c).m))
					v1.AddArg2(v2, x)
					v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
					v3.AuxInt = int64ToAuxInt(64 - udivisible64(c).k)
					v0.AddArg2(v1, v3)
					v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
					v4.AuxInt = int64ToAuxInt(int64(udivisible64(c).max))
					v.AddArg2(v0, v4)
					return true
				}
			}
		}
		break
	}
	// Variant using the Avg64u form of the unsigned magic divide.
	// match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s])) ) )
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic64(c).m) && s == umagic64(c).s-1 && x.Op != OpConst64 && udivisibleOK64(c)
	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible64(c).m)]) x) (Const64 <typ.UInt64> [64-udivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(udivisible64(c).max)]) )
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMul64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst64 {
					continue
				}
				c := auxIntToInt64(v_1_0.AuxInt)
				if v_1_1.Op != OpRsh64Ux64 {
					continue
				}
				_ = v_1_1.Args[1]
				v_1_1_0 := v_1_1.Args[0]
				if v_1_1_0.Op != OpAvg64u {
					continue
				}
				_ = v_1_1_0.Args[1]
				if x != v_1_1_0.Args[0] {
					continue
				}
				mul := v_1_1_0.Args[1]
				if mul.Op != OpHmul64u {
					continue
				}
				_ = mul.Args[1]
				mul_0 := mul.Args[0]
				mul_1 := mul.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
					if mul_0.Op != OpConst64 {
						continue
					}
					m := auxIntToInt64(mul_0.AuxInt)
					if x != mul_1 {
						continue
					}
					v_1_1_1 := v_1_1.Args[1]
					if v_1_1_1.Op != OpConst64 {
						continue
					}
					s := auxIntToInt64(v_1_1_1.AuxInt)
					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic64(c).m) && s == umagic64(c).s-1 && x.Op != OpConst64 && udivisibleOK64(c)) {
						continue
					}
					v.reset(OpLeq64U)
					v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
					v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
					v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
					v2.AuxInt = int64ToAuxInt(int64(udivisible64(c).m))
					v1.AddArg2(v2, x)
					v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
					v3.AuxInt = int64ToAuxInt(64 - udivisible64(c).k)
					v0.AddArg2(v1, v3)
					v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
					v4.AuxInt = int64ToAuxInt(int64(udivisible64(c).max))
					v.AddArg2(v0, v4)
					return true
				}
			}
		}
		break
	}
	// Signed divisibility: recognizes the smagic64 expansion of x/c
	// (high-multiply, shift, minus sign correction) wrapped back into
	// x == c*(x/c), and replaces it with the sdivisible64 test.
	// match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 mul:(Hmul64 (Const64 [m]) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) ) )
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic64(c).m/2) && s == smagic64(c).s-1 && x.Op != OpConst64 && sdivisibleOK64(c)
	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible64(c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible64(c).a)]) ) (Const64 <typ.UInt64> [64-sdivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(sdivisible64(c).max)]) )
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMul64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst64 {
					continue
				}
				c := auxIntToInt64(v_1_0.AuxInt)
				if v_1_1.Op != OpSub64 {
					continue
				}
				_ = v_1_1.Args[1]
				v_1_1_0 := v_1_1.Args[0]
				if v_1_1_0.Op != OpRsh64x64 {
					continue
				}
				_ = v_1_1_0.Args[1]
				mul := v_1_1_0.Args[0]
				if mul.Op != OpHmul64 {
					continue
				}
				_ = mul.Args[1]
				mul_0 := mul.Args[0]
				mul_1 := mul.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
					if mul_0.Op != OpConst64 {
						continue
					}
					m := auxIntToInt64(mul_0.AuxInt)
					if x != mul_1 {
						continue
					}
					v_1_1_0_1 := v_1_1_0.Args[1]
					if v_1_1_0_1.Op != OpConst64 {
						continue
					}
					s := auxIntToInt64(v_1_1_0_1.AuxInt)
					v_1_1_1 := v_1_1.Args[1]
					if v_1_1_1.Op != OpRsh64x64 {
						continue
					}
					_ = v_1_1_1.Args[1]
					if x != v_1_1_1.Args[0] {
						continue
					}
					v_1_1_1_1 := v_1_1_1.Args[1]
					if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 63 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic64(c).m/2) && s == smagic64(c).s-1 && x.Op != OpConst64 && sdivisibleOK64(c)) {
						continue
					}
					v.reset(OpLeq64U)
					v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
					v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
					v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
					v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
					v3.AuxInt = int64ToAuxInt(int64(sdivisible64(c).m))
					v2.AddArg2(v3, x)
					v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
					v4.AuxInt = int64ToAuxInt(int64(sdivisible64(c).a))
					v1.AddArg2(v2, v4)
					v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
					v5.AuxInt = int64ToAuxInt(64 - sdivisible64(c).k)
					v0.AddArg2(v1, v5)
					v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
					v6.AuxInt = int64ToAuxInt(int64(sdivisible64(c).max))
					v.AddArg2(v0, v6)
					return true
				}
			}
		}
		break
	}
	// Signed divisibility variant where the smagic expansion adds x back
	// after the high multiply (m == smagic64(c).m form).
	// match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 (Const64 [m]) x) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) ) )
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic64(c).m) && s == smagic64(c).s && x.Op != OpConst64 && sdivisibleOK64(c)
	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible64(c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible64(c).a)]) ) (Const64 <typ.UInt64> [64-sdivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(sdivisible64(c).max)]) )
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMul64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst64 {
					continue
				}
				c := auxIntToInt64(v_1_0.AuxInt)
				if v_1_1.Op != OpSub64 {
					continue
				}
				_ = v_1_1.Args[1]
				v_1_1_0 := v_1_1.Args[0]
				if v_1_1_0.Op != OpRsh64x64 {
					continue
				}
				_ = v_1_1_0.Args[1]
				v_1_1_0_0 := v_1_1_0.Args[0]
				if v_1_1_0_0.Op != OpAdd64 {
					continue
				}
				_ = v_1_1_0_0.Args[1]
				v_1_1_0_0_0 := v_1_1_0_0.Args[0]
				v_1_1_0_0_1 := v_1_1_0_0.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_1_0_0_0, v_1_1_0_0_1 = _i2+1, v_1_1_0_0_1, v_1_1_0_0_0 {
					mul := v_1_1_0_0_0
					if mul.Op != OpHmul64 {
						continue
					}
					_ = mul.Args[1]
					mul_0 := mul.Args[0]
					mul_1 := mul.Args[1]
					for _i3 := 0; _i3 <= 1; _i3, mul_0, mul_1 = _i3+1, mul_1, mul_0 {
						if mul_0.Op != OpConst64 {
							continue
						}
						m := auxIntToInt64(mul_0.AuxInt)
						if x != mul_1 || x != v_1_1_0_0_1 {
							continue
						}
						v_1_1_0_1 := v_1_1_0.Args[1]
						if v_1_1_0_1.Op != OpConst64 {
							continue
						}
						s := auxIntToInt64(v_1_1_0_1.AuxInt)
						v_1_1_1 := v_1_1.Args[1]
						if v_1_1_1.Op != OpRsh64x64 {
							continue
						}
						_ = v_1_1_1.Args[1]
						if x != v_1_1_1.Args[0] {
							continue
						}
						v_1_1_1_1 := v_1_1_1.Args[1]
						if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 63 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic64(c).m) && s == smagic64(c).s && x.Op != OpConst64 && sdivisibleOK64(c)) {
							continue
						}
						v.reset(OpLeq64U)
						v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
						v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
						v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
						v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
						v3.AuxInt = int64ToAuxInt(int64(sdivisible64(c).m))
						v2.AddArg2(v3, x)
						v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
						v4.AuxInt = int64ToAuxInt(int64(sdivisible64(c).a))
						v1.AddArg2(v2, v4)
						v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
						v5.AuxInt = int64ToAuxInt(64 - sdivisible64(c).k)
						v0.AddArg2(v1, v5)
						v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
						v6.AuxInt = int64ToAuxInt(int64(sdivisible64(c).max))
						v.AddArg2(v0, v6)
						return true
					}
				}
			}
		}
		break
	}
	// Recognizes the expanded signed "round toward zero to a multiple of
	// 1<<k" pattern; n equals that rounding iff its low k bits are zero,
	// so the comparison reduces to a mask test (i.e. n % (1<<k) == 0).
	// match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
	// cond: k > 0 && k < 63 && kbar == 64 - k
	// result: (Eq64 (And64 <t> n (Const64 <t> [1<<uint(k)-1])) (Const64 <t> [0]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			n := v_0
			if v_1.Op != OpLsh64x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpRsh64x64 {
				continue
			}
			_ = v_1_0.Args[1]
			v_1_0_0 := v_1_0.Args[0]
			if v_1_0_0.Op != OpAdd64 {
				continue
			}
			t := v_1_0_0.Type
			_ = v_1_0_0.Args[1]
			v_1_0_0_0 := v_1_0_0.Args[0]
			v_1_0_0_1 := v_1_0_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh64Ux64 || v_1_0_0_1.Type != t {
					continue
				}
				_ = v_1_0_0_1.Args[1]
				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
				if v_1_0_0_1_0.Op != OpRsh64x64 || v_1_0_0_1_0.Type != t {
					continue
				}
				_ = v_1_0_0_1_0.Args[1]
				if n != v_1_0_0_1_0.Args[0] {
					continue
				}
				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 63 {
					continue
				}
				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
					continue
				}
				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
				v_1_0_1 := v_1_0.Args[1]
				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
					continue
				}
				k := auxIntToInt64(v_1_0_1.AuxInt)
				v_1_1 := v_1.Args[1]
				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 63 && kbar == 64-k) {
					continue
				}
				v.reset(OpEq64)
				v0 := b.NewValue0(v.Pos, OpAnd64, t)
				v1 := b.NewValue0(v.Pos, OpConst64, t)
				v1.AuxInt = int64ToAuxInt(1<<uint(k) - 1)
				v0.AddArg2(n, v1)
				v2 := b.NewValue0(v.Pos, OpConst64, t)
				v2.AuxInt = int64ToAuxInt(0)
				v.AddArg2(v0, v2)
				return true
			}
		}
		break
	}
	// x-y == 0 is the same as x == y; restricted to single-use subtracts
	// so the Sub64 is not kept alive just for this comparison.
	// match: (Eq64 s:(Sub64 x y) (Const64 [0]))
	// cond: s.Uses == 1
	// result: (Eq64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			s := v_0
			if s.Op != OpSub64 {
				continue
			}
			y := s.Args[1]
			x := s.Args[0]
			if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 || !(s.Uses == 1) {
				continue
			}
			v.reset(OpEq64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// For a single-bit mask y, (x&y) == y is equivalent to (x&y) != 0;
	// the != 0 form is cheaper on some targets.
	// match: (Eq64 (And64 <t> x (Const64 <t> [y])) (Const64 <t> [y]))
	// cond: oneBit64(y)
	// result: (Neq64 (And64 <t> x (Const64 <t> [y])) (Const64 <t> [0]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd64 {
				continue
			}
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				if v_0_1.Op != OpConst64 || v_0_1.Type != t {
					continue
				}
				y := auxIntToInt64(v_0_1.AuxInt)
				if v_1.Op != OpConst64 || v_1.Type != t || auxIntToInt64(v_1.AuxInt) != y || !(oneBit64(y)) {
					continue
				}
				v.reset(OpNeq64)
				v0 := b.NewValue0(v.Pos, OpAnd64, t)
				v1 := b.NewValue0(v.Pos, OpConst64, t)
				v1.AuxInt = int64ToAuxInt(y)
				v0.AddArg2(x, v1)
				v2 := b.NewValue0(v.Pos, OpConst64, t)
				v2.AuxInt = int64ToAuxInt(0)
				v.AddArg2(v0, v2)
				return true
			}
		}
		break
	}
	return false
}
9482func rewriteValuegeneric_OpEq64F(v *Value) bool {
9483	v_1 := v.Args[1]
9484	v_0 := v.Args[0]
9485	// match: (Eq64F (Const64F [c]) (Const64F [d]))
9486	// result: (ConstBool [c == d])
9487	for {
9488		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9489			if v_0.Op != OpConst64F {
9490				continue
9491			}
9492			c := auxIntToFloat64(v_0.AuxInt)
9493			if v_1.Op != OpConst64F {
9494				continue
9495			}
9496			d := auxIntToFloat64(v_1.AuxInt)
9497			v.reset(OpConstBool)
9498			v.AuxInt = boolToAuxInt(c == d)
9499			return true
9500		}
9501		break
9502	}
9503	return false
9504}
9505func rewriteValuegeneric_OpEq8(v *Value) bool {
9506	v_1 := v.Args[1]
9507	v_0 := v.Args[0]
9508	b := v.Block
9509	config := b.Func.Config
9510	typ := &b.Func.Config.Types
9511	// match: (Eq8 x x)
9512	// result: (ConstBool [true])
9513	for {
9514		x := v_0
9515		if x != v_1 {
9516			break
9517		}
9518		v.reset(OpConstBool)
9519		v.AuxInt = boolToAuxInt(true)
9520		return true
9521	}
9522	// match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
9523	// result: (Eq8 (Const8 <t> [c-d]) x)
9524	for {
9525		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9526			if v_0.Op != OpConst8 {
9527				continue
9528			}
9529			t := v_0.Type
9530			c := auxIntToInt8(v_0.AuxInt)
9531			if v_1.Op != OpAdd8 {
9532				continue
9533			}
9534			_ = v_1.Args[1]
9535			v_1_0 := v_1.Args[0]
9536			v_1_1 := v_1.Args[1]
9537			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
9538				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
9539					continue
9540				}
9541				d := auxIntToInt8(v_1_0.AuxInt)
9542				x := v_1_1
9543				v.reset(OpEq8)
9544				v0 := b.NewValue0(v.Pos, OpConst8, t)
9545				v0.AuxInt = int8ToAuxInt(c - d)
9546				v.AddArg2(v0, x)
9547				return true
9548			}
9549		}
9550		break
9551	}
9552	// match: (Eq8 (Const8 [c]) (Const8 [d]))
9553	// result: (ConstBool [c == d])
9554	for {
9555		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9556			if v_0.Op != OpConst8 {
9557				continue
9558			}
9559			c := auxIntToInt8(v_0.AuxInt)
9560			if v_1.Op != OpConst8 {
9561				continue
9562			}
9563			d := auxIntToInt8(v_1.AuxInt)
9564			v.reset(OpConstBool)
9565			v.AuxInt = boolToAuxInt(c == d)
9566			return true
9567		}
9568		break
9569	}
9570	// match: (Eq8 (Mod8u x (Const8 [c])) (Const8 [0]))
9571	// cond: x.Op != OpConst8 && udivisibleOK8(c) && !hasSmallRotate(config)
9572	// result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt8to32 <typ.UInt32> x) (Const32 <typ.UInt32> [int32(uint8(c))])) (Const32 <typ.UInt32> [0]))
9573	for {
9574		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9575			if v_0.Op != OpMod8u {
9576				continue
9577			}
9578			_ = v_0.Args[1]
9579			x := v_0.Args[0]
9580			v_0_1 := v_0.Args[1]
9581			if v_0_1.Op != OpConst8 {
9582				continue
9583			}
9584			c := auxIntToInt8(v_0_1.AuxInt)
9585			if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 || !(x.Op != OpConst8 && udivisibleOK8(c) && !hasSmallRotate(config)) {
9586				continue
9587			}
9588			v.reset(OpEq32)
9589			v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
9590			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
9591			v1.AddArg(x)
9592			v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
9593			v2.AuxInt = int32ToAuxInt(int32(uint8(c)))
9594			v0.AddArg2(v1, v2)
9595			v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
9596			v3.AuxInt = int32ToAuxInt(0)
9597			v.AddArg2(v0, v3)
9598			return true
9599		}
9600		break
9601	}
9602	// match: (Eq8 (Mod8 x (Const8 [c])) (Const8 [0]))
9603	// cond: x.Op != OpConst8 && sdivisibleOK8(c) && !hasSmallRotate(config)
9604	// result: (Eq32 (Mod32 <typ.Int32> (SignExt8to32 <typ.Int32> x) (Const32 <typ.Int32> [int32(c)])) (Const32 <typ.Int32> [0]))
9605	for {
9606		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9607			if v_0.Op != OpMod8 {
9608				continue
9609			}
9610			_ = v_0.Args[1]
9611			x := v_0.Args[0]
9612			v_0_1 := v_0.Args[1]
9613			if v_0_1.Op != OpConst8 {
9614				continue
9615			}
9616			c := auxIntToInt8(v_0_1.AuxInt)
9617			if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 || !(x.Op != OpConst8 && sdivisibleOK8(c) && !hasSmallRotate(config)) {
9618				continue
9619			}
9620			v.reset(OpEq32)
9621			v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
9622			v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
9623			v1.AddArg(x)
9624			v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
9625			v2.AuxInt = int32ToAuxInt(int32(c))
9626			v0.AddArg2(v1, v2)
9627			v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
9628			v3.AuxInt = int32ToAuxInt(0)
9629			v.AddArg2(v0, v3)
9630			return true
9631		}
9632		break
9633	}
9634	// match: (Eq8 x (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s]))) ) )
9635	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<8+umagic8(c).m) && s == 8+umagic8(c).s && x.Op != OpConst8 && udivisibleOK8(c)
9636	// result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int8(udivisible8(c).m)]) x) (Const8 <typ.UInt8> [int8(8-udivisible8(c).k)]) ) (Const8 <typ.UInt8> [int8(udivisible8(c).max)]) )
9637	for {
9638		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9639			x := v_0
9640			if v_1.Op != OpMul8 {
9641				continue
9642			}
9643			_ = v_1.Args[1]
9644			v_1_0 := v_1.Args[0]
9645			v_1_1 := v_1.Args[1]
9646			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
9647				if v_1_0.Op != OpConst8 {
9648					continue
9649				}
9650				c := auxIntToInt8(v_1_0.AuxInt)
9651				if v_1_1.Op != OpTrunc32to8 {
9652					continue
9653				}
9654				v_1_1_0 := v_1_1.Args[0]
9655				if v_1_1_0.Op != OpRsh32Ux64 {
9656					continue
9657				}
9658				_ = v_1_1_0.Args[1]
9659				mul := v_1_1_0.Args[0]
9660				if mul.Op != OpMul32 {
9661					continue
9662				}
9663				_ = mul.Args[1]
9664				mul_0 := mul.Args[0]
9665				mul_1 := mul.Args[1]
9666				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
9667					if mul_0.Op != OpConst32 {
9668						continue
9669					}
9670					m := auxIntToInt32(mul_0.AuxInt)
9671					if mul_1.Op != OpZeroExt8to32 || x != mul_1.Args[0] {
9672						continue
9673					}
9674					v_1_1_0_1 := v_1_1_0.Args[1]
9675					if v_1_1_0_1.Op != OpConst64 {
9676						continue
9677					}
9678					s := auxIntToInt64(v_1_1_0_1.AuxInt)
9679					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<8+umagic8(c).m) && s == 8+umagic8(c).s && x.Op != OpConst8 && udivisibleOK8(c)) {
9680						continue
9681					}
9682					v.reset(OpLeq8U)
9683					v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
9684					v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
9685					v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
9686					v2.AuxInt = int8ToAuxInt(int8(udivisible8(c).m))
9687					v1.AddArg2(v2, x)
9688					v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
9689					v3.AuxInt = int8ToAuxInt(int8(8 - udivisible8(c).k))
9690					v0.AddArg2(v1, v3)
9691					v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
9692					v4.AuxInt = int8ToAuxInt(int8(udivisible8(c).max))
9693					v.AddArg2(v0, v4)
9694					return true
9695				}
9696			}
9697		}
9698		break
9699	}
9700	// match: (Eq8 x (Mul8 (Const8 [c]) (Sub8 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt8to32 x)) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31]))) ) )
9701	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic8(c).m) && s == 8+smagic8(c).s && x.Op != OpConst8 && sdivisibleOK8(c)
9702	// result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int8(sdivisible8(c).m)]) x) (Const8 <typ.UInt8> [int8(sdivisible8(c).a)]) ) (Const8 <typ.UInt8> [int8(8-sdivisible8(c).k)]) ) (Const8 <typ.UInt8> [int8(sdivisible8(c).max)]) )
9703	for {
9704		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9705			x := v_0
9706			if v_1.Op != OpMul8 {
9707				continue
9708			}
9709			_ = v_1.Args[1]
9710			v_1_0 := v_1.Args[0]
9711			v_1_1 := v_1.Args[1]
9712			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
9713				if v_1_0.Op != OpConst8 {
9714					continue
9715				}
9716				c := auxIntToInt8(v_1_0.AuxInt)
9717				if v_1_1.Op != OpSub8 {
9718					continue
9719				}
9720				_ = v_1_1.Args[1]
9721				v_1_1_0 := v_1_1.Args[0]
9722				if v_1_1_0.Op != OpRsh32x64 {
9723					continue
9724				}
9725				_ = v_1_1_0.Args[1]
9726				mul := v_1_1_0.Args[0]
9727				if mul.Op != OpMul32 {
9728					continue
9729				}
9730				_ = mul.Args[1]
9731				mul_0 := mul.Args[0]
9732				mul_1 := mul.Args[1]
9733				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
9734					if mul_0.Op != OpConst32 {
9735						continue
9736					}
9737					m := auxIntToInt32(mul_0.AuxInt)
9738					if mul_1.Op != OpSignExt8to32 || x != mul_1.Args[0] {
9739						continue
9740					}
9741					v_1_1_0_1 := v_1_1_0.Args[1]
9742					if v_1_1_0_1.Op != OpConst64 {
9743						continue
9744					}
9745					s := auxIntToInt64(v_1_1_0_1.AuxInt)
9746					v_1_1_1 := v_1_1.Args[1]
9747					if v_1_1_1.Op != OpRsh32x64 {
9748						continue
9749					}
9750					_ = v_1_1_1.Args[1]
9751					v_1_1_1_0 := v_1_1_1.Args[0]
9752					if v_1_1_1_0.Op != OpSignExt8to32 || x != v_1_1_1_0.Args[0] {
9753						continue
9754					}
9755					v_1_1_1_1 := v_1_1_1.Args[1]
9756					if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 31 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic8(c).m) && s == 8+smagic8(c).s && x.Op != OpConst8 && sdivisibleOK8(c)) {
9757						continue
9758					}
9759					v.reset(OpLeq8U)
9760					v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
9761					v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
9762					v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
9763					v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
9764					v3.AuxInt = int8ToAuxInt(int8(sdivisible8(c).m))
9765					v2.AddArg2(v3, x)
9766					v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
9767					v4.AuxInt = int8ToAuxInt(int8(sdivisible8(c).a))
9768					v1.AddArg2(v2, v4)
9769					v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
9770					v5.AuxInt = int8ToAuxInt(int8(8 - sdivisible8(c).k))
9771					v0.AddArg2(v1, v5)
9772					v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
9773					v6.AuxInt = int8ToAuxInt(int8(sdivisible8(c).max))
9774					v.AddArg2(v0, v6)
9775					return true
9776				}
9777			}
9778		}
9779		break
9780	}
9781	// match: (Eq8 n (Lsh8x64 (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
9782	// cond: k > 0 && k < 7 && kbar == 8 - k
9783	// result: (Eq8 (And8 <t> n (Const8 <t> [1<<uint(k)-1])) (Const8 <t> [0]))
9784	for {
9785		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9786			n := v_0
9787			if v_1.Op != OpLsh8x64 {
9788				continue
9789			}
9790			_ = v_1.Args[1]
9791			v_1_0 := v_1.Args[0]
9792			if v_1_0.Op != OpRsh8x64 {
9793				continue
9794			}
9795			_ = v_1_0.Args[1]
9796			v_1_0_0 := v_1_0.Args[0]
9797			if v_1_0_0.Op != OpAdd8 {
9798				continue
9799			}
9800			t := v_1_0_0.Type
9801			_ = v_1_0_0.Args[1]
9802			v_1_0_0_0 := v_1_0_0.Args[0]
9803			v_1_0_0_1 := v_1_0_0.Args[1]
9804			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
9805				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh8Ux64 || v_1_0_0_1.Type != t {
9806					continue
9807				}
9808				_ = v_1_0_0_1.Args[1]
9809				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
9810				if v_1_0_0_1_0.Op != OpRsh8x64 || v_1_0_0_1_0.Type != t {
9811					continue
9812				}
9813				_ = v_1_0_0_1_0.Args[1]
9814				if n != v_1_0_0_1_0.Args[0] {
9815					continue
9816				}
9817				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
9818				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 7 {
9819					continue
9820				}
9821				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
9822				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
9823					continue
9824				}
9825				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
9826				v_1_0_1 := v_1_0.Args[1]
9827				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
9828					continue
9829				}
9830				k := auxIntToInt64(v_1_0_1.AuxInt)
9831				v_1_1 := v_1.Args[1]
9832				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 7 && kbar == 8-k) {
9833					continue
9834				}
9835				v.reset(OpEq8)
9836				v0 := b.NewValue0(v.Pos, OpAnd8, t)
9837				v1 := b.NewValue0(v.Pos, OpConst8, t)
9838				v1.AuxInt = int8ToAuxInt(1<<uint(k) - 1)
9839				v0.AddArg2(n, v1)
9840				v2 := b.NewValue0(v.Pos, OpConst8, t)
9841				v2.AuxInt = int8ToAuxInt(0)
9842				v.AddArg2(v0, v2)
9843				return true
9844			}
9845		}
9846		break
9847	}
9848	// match: (Eq8 s:(Sub8 x y) (Const8 [0]))
9849	// cond: s.Uses == 1
9850	// result: (Eq8 x y)
9851	for {
9852		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9853			s := v_0
9854			if s.Op != OpSub8 {
9855				continue
9856			}
9857			y := s.Args[1]
9858			x := s.Args[0]
9859			if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 || !(s.Uses == 1) {
9860				continue
9861			}
9862			v.reset(OpEq8)
9863			v.AddArg2(x, y)
9864			return true
9865		}
9866		break
9867	}
9868	// match: (Eq8 (And8 <t> x (Const8 <t> [y])) (Const8 <t> [y]))
9869	// cond: oneBit8(y)
9870	// result: (Neq8 (And8 <t> x (Const8 <t> [y])) (Const8 <t> [0]))
9871	for {
9872		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9873			if v_0.Op != OpAnd8 {
9874				continue
9875			}
9876			t := v_0.Type
9877			_ = v_0.Args[1]
9878			v_0_0 := v_0.Args[0]
9879			v_0_1 := v_0.Args[1]
9880			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
9881				x := v_0_0
9882				if v_0_1.Op != OpConst8 || v_0_1.Type != t {
9883					continue
9884				}
9885				y := auxIntToInt8(v_0_1.AuxInt)
9886				if v_1.Op != OpConst8 || v_1.Type != t || auxIntToInt8(v_1.AuxInt) != y || !(oneBit8(y)) {
9887					continue
9888				}
9889				v.reset(OpNeq8)
9890				v0 := b.NewValue0(v.Pos, OpAnd8, t)
9891				v1 := b.NewValue0(v.Pos, OpConst8, t)
9892				v1.AuxInt = int8ToAuxInt(y)
9893				v0.AddArg2(x, v1)
9894				v2 := b.NewValue0(v.Pos, OpConst8, t)
9895				v2.AuxInt = int8ToAuxInt(0)
9896				v.AddArg2(v0, v2)
9897				return true
9898			}
9899		}
9900		break
9901	}
9902	return false
9903}
9904func rewriteValuegeneric_OpEqB(v *Value) bool {
9905	v_1 := v.Args[1]
9906	v_0 := v.Args[0]
9907	// match: (EqB (ConstBool [c]) (ConstBool [d]))
9908	// result: (ConstBool [c == d])
9909	for {
9910		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9911			if v_0.Op != OpConstBool {
9912				continue
9913			}
9914			c := auxIntToBool(v_0.AuxInt)
9915			if v_1.Op != OpConstBool {
9916				continue
9917			}
9918			d := auxIntToBool(v_1.AuxInt)
9919			v.reset(OpConstBool)
9920			v.AuxInt = boolToAuxInt(c == d)
9921			return true
9922		}
9923		break
9924	}
9925	// match: (EqB (ConstBool [false]) x)
9926	// result: (Not x)
9927	for {
9928		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9929			if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != false {
9930				continue
9931			}
9932			x := v_1
9933			v.reset(OpNot)
9934			v.AddArg(x)
9935			return true
9936		}
9937		break
9938	}
9939	// match: (EqB (ConstBool [true]) x)
9940	// result: x
9941	for {
9942		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9943			if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != true {
9944				continue
9945			}
9946			x := v_1
9947			v.copyOf(x)
9948			return true
9949		}
9950		break
9951	}
9952	return false
9953}
9954func rewriteValuegeneric_OpEqInter(v *Value) bool {
9955	v_1 := v.Args[1]
9956	v_0 := v.Args[0]
9957	b := v.Block
9958	typ := &b.Func.Config.Types
9959	// match: (EqInter x y)
9960	// result: (EqPtr (ITab x) (ITab y))
9961	for {
9962		x := v_0
9963		y := v_1
9964		v.reset(OpEqPtr)
9965		v0 := b.NewValue0(v.Pos, OpITab, typ.Uintptr)
9966		v0.AddArg(x)
9967		v1 := b.NewValue0(v.Pos, OpITab, typ.Uintptr)
9968		v1.AddArg(y)
9969		v.AddArg2(v0, v1)
9970		return true
9971	}
9972}
// rewriteValuegeneric_OpEqPtr applies the generic rewrite rules for EqPtr
// and reports whether v was rewritten. Each rule below is tried in order;
// the inner _i0 loops retry commutative matches with the operands swapped.
// The rules fold pointer comparisons of Addr/LocalAddr/OffPtr/constant
// operands into ConstBool, and comparisons against nil into (Not (IsNonNil p)).
func rewriteValuegeneric_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqPtr x x)
	// result: (ConstBool [true])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (EqPtr (Addr {x} _) (Addr {y} _))
	// result: (ConstBool [x == y])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAddr {
				continue
			}
			x := auxToSym(v_0.Aux)
			if v_1.Op != OpAddr {
				continue
			}
			y := auxToSym(v_1.Aux)
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(x == y)
			return true
		}
		break
	}
	// match: (EqPtr (Addr {x} _) (OffPtr [o] (Addr {y} _)))
	// result: (ConstBool [x == y && o == 0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAddr {
				continue
			}
			x := auxToSym(v_0.Aux)
			if v_1.Op != OpOffPtr {
				continue
			}
			o := auxIntToInt64(v_1.AuxInt)
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpAddr {
				continue
			}
			y := auxToSym(v_1_0.Aux)
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(x == y && o == 0)
			return true
		}
		break
	}
	// match: (EqPtr (OffPtr [o1] (Addr {x} _)) (OffPtr [o2] (Addr {y} _)))
	// result: (ConstBool [x == y && o1 == o2])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpOffPtr {
				continue
			}
			o1 := auxIntToInt64(v_0.AuxInt)
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpAddr {
				continue
			}
			x := auxToSym(v_0_0.Aux)
			if v_1.Op != OpOffPtr {
				continue
			}
			o2 := auxIntToInt64(v_1.AuxInt)
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpAddr {
				continue
			}
			y := auxToSym(v_1_0.Aux)
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(x == y && o1 == o2)
			return true
		}
		break
	}
	// match: (EqPtr (LocalAddr {x} _ _) (LocalAddr {y} _ _))
	// result: (ConstBool [x == y])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLocalAddr {
				continue
			}
			x := auxToSym(v_0.Aux)
			if v_1.Op != OpLocalAddr {
				continue
			}
			y := auxToSym(v_1.Aux)
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(x == y)
			return true
		}
		break
	}
	// match: (EqPtr (LocalAddr {x} _ _) (OffPtr [o] (LocalAddr {y} _ _)))
	// result: (ConstBool [x == y && o == 0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLocalAddr {
				continue
			}
			x := auxToSym(v_0.Aux)
			if v_1.Op != OpOffPtr {
				continue
			}
			o := auxIntToInt64(v_1.AuxInt)
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpLocalAddr {
				continue
			}
			y := auxToSym(v_1_0.Aux)
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(x == y && o == 0)
			return true
		}
		break
	}
	// match: (EqPtr (OffPtr [o1] (LocalAddr {x} _ _)) (OffPtr [o2] (LocalAddr {y} _ _)))
	// result: (ConstBool [x == y && o1 == o2])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpOffPtr {
				continue
			}
			o1 := auxIntToInt64(v_0.AuxInt)
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLocalAddr {
				continue
			}
			x := auxToSym(v_0_0.Aux)
			if v_1.Op != OpOffPtr {
				continue
			}
			o2 := auxIntToInt64(v_1.AuxInt)
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpLocalAddr {
				continue
			}
			y := auxToSym(v_1_0.Aux)
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(x == y && o1 == o2)
			return true
		}
		break
	}
	// match: (EqPtr (OffPtr [o1] p1) p2)
	// cond: isSamePtr(p1, p2)
	// result: (ConstBool [o1 == 0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpOffPtr {
				continue
			}
			o1 := auxIntToInt64(v_0.AuxInt)
			p1 := v_0.Args[0]
			p2 := v_1
			if !(isSamePtr(p1, p2)) {
				continue
			}
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(o1 == 0)
			return true
		}
		break
	}
	// match: (EqPtr (OffPtr [o1] p1) (OffPtr [o2] p2))
	// cond: isSamePtr(p1, p2)
	// result: (ConstBool [o1 == o2])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpOffPtr {
				continue
			}
			o1 := auxIntToInt64(v_0.AuxInt)
			p1 := v_0.Args[0]
			if v_1.Op != OpOffPtr {
				continue
			}
			o2 := auxIntToInt64(v_1.AuxInt)
			p2 := v_1.Args[0]
			if !(isSamePtr(p1, p2)) {
				continue
			}
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(o1 == o2)
			return true
		}
		break
	}
	// match: (EqPtr (Const32 [c]) (Const32 [d]))
	// result: (ConstBool [c == d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpConst32 {
				continue
			}
			d := auxIntToInt32(v_1.AuxInt)
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(c == d)
			return true
		}
		break
	}
	// match: (EqPtr (Const64 [c]) (Const64 [d]))
	// result: (ConstBool [c == d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(c == d)
			return true
		}
		break
	}
	// match: (EqPtr (Convert (Addr {x} _) _) (Addr {y} _))
	// result: (ConstBool [x==y])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConvert {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpAddr {
				continue
			}
			x := auxToSym(v_0_0.Aux)
			if v_1.Op != OpAddr {
				continue
			}
			y := auxToSym(v_1.Aux)
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(x == y)
			return true
		}
		break
	}
	// match: (EqPtr (LocalAddr _ _) (Addr _))
	// result: (ConstBool [false])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLocalAddr || v_1.Op != OpAddr {
				continue
			}
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(false)
			return true
		}
		break
	}
	// match: (EqPtr (OffPtr (LocalAddr _ _)) (Addr _))
	// result: (ConstBool [false])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpOffPtr {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLocalAddr || v_1.Op != OpAddr {
				continue
			}
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(false)
			return true
		}
		break
	}
	// match: (EqPtr (LocalAddr _ _) (OffPtr (Addr _)))
	// result: (ConstBool [false])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
				continue
			}
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpAddr {
				continue
			}
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(false)
			return true
		}
		break
	}
	// match: (EqPtr (OffPtr (LocalAddr _ _)) (OffPtr (Addr _)))
	// result: (ConstBool [false])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpOffPtr {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
				continue
			}
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpAddr {
				continue
			}
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(false)
			return true
		}
		break
	}
	// match: (EqPtr (AddPtr p1 o1) p2)
	// cond: isSamePtr(p1, p2)
	// result: (Not (IsNonNil o1))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAddPtr {
				continue
			}
			o1 := v_0.Args[1]
			p1 := v_0.Args[0]
			p2 := v_1
			if !(isSamePtr(p1, p2)) {
				continue
			}
			v.reset(OpNot)
			v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool)
			v0.AddArg(o1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (EqPtr (Const32 [0]) p)
	// result: (Not (IsNonNil p))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
				continue
			}
			p := v_1
			v.reset(OpNot)
			v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool)
			v0.AddArg(p)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (EqPtr (Const64 [0]) p)
	// result: (Not (IsNonNil p))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
				continue
			}
			p := v_1
			v.reset(OpNot)
			v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool)
			v0.AddArg(p)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (EqPtr (ConstNil) p)
	// result: (Not (IsNonNil p))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConstNil {
				continue
			}
			p := v_1
			v.reset(OpNot)
			v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool)
			v0.AddArg(p)
			v.AddArg(v0)
			return true
		}
		break
	}
	return false
}
10369func rewriteValuegeneric_OpEqSlice(v *Value) bool {
10370	v_1 := v.Args[1]
10371	v_0 := v.Args[0]
10372	b := v.Block
10373	typ := &b.Func.Config.Types
10374	// match: (EqSlice x y)
10375	// result: (EqPtr (SlicePtr x) (SlicePtr y))
10376	for {
10377		x := v_0
10378		y := v_1
10379		v.reset(OpEqPtr)
10380		v0 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
10381		v0.AddArg(x)
10382		v1 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
10383		v1.AddArg(y)
10384		v.AddArg2(v0, v1)
10385		return true
10386	}
10387}
10388func rewriteValuegeneric_OpFloor(v *Value) bool {
10389	v_0 := v.Args[0]
10390	// match: (Floor (Const64F [c]))
10391	// result: (Const64F [math.Floor(c)])
10392	for {
10393		if v_0.Op != OpConst64F {
10394			break
10395		}
10396		c := auxIntToFloat64(v_0.AuxInt)
10397		v.reset(OpConst64F)
10398		v.AuxInt = float64ToAuxInt(math.Floor(c))
10399		return true
10400	}
10401	return false
10402}
10403func rewriteValuegeneric_OpIMake(v *Value) bool {
10404	v_1 := v.Args[1]
10405	v_0 := v.Args[0]
10406	// match: (IMake _typ (StructMake1 val))
10407	// result: (IMake _typ val)
10408	for {
10409		_typ := v_0
10410		if v_1.Op != OpStructMake1 {
10411			break
10412		}
10413		val := v_1.Args[0]
10414		v.reset(OpIMake)
10415		v.AddArg2(_typ, val)
10416		return true
10417	}
10418	// match: (IMake _typ (ArrayMake1 val))
10419	// result: (IMake _typ val)
10420	for {
10421		_typ := v_0
10422		if v_1.Op != OpArrayMake1 {
10423			break
10424		}
10425		val := v_1.Args[0]
10426		v.reset(OpIMake)
10427		v.AddArg2(_typ, val)
10428		return true
10429	}
10430	return false
10431}
10432func rewriteValuegeneric_OpInterLECall(v *Value) bool {
10433	// match: (InterLECall [argsize] {auxCall} (Addr {fn} (SB)) ___)
10434	// result: devirtLECall(v, fn.(*obj.LSym))
10435	for {
10436		if len(v.Args) < 1 {
10437			break
10438		}
10439		v_0 := v.Args[0]
10440		if v_0.Op != OpAddr {
10441			break
10442		}
10443		fn := auxToSym(v_0.Aux)
10444		v_0_0 := v_0.Args[0]
10445		if v_0_0.Op != OpSB {
10446			break
10447		}
10448		v.copyOf(devirtLECall(v, fn.(*obj.LSym)))
10449		return true
10450	}
10451	return false
10452}
10453func rewriteValuegeneric_OpIsInBounds(v *Value) bool {
10454	v_1 := v.Args[1]
10455	v_0 := v.Args[0]
10456	// match: (IsInBounds (ZeroExt8to32 _) (Const32 [c]))
10457	// cond: (1 << 8) <= c
10458	// result: (ConstBool [true])
10459	for {
10460		if v_0.Op != OpZeroExt8to32 || v_1.Op != OpConst32 {
10461			break
10462		}
10463		c := auxIntToInt32(v_1.AuxInt)
10464		if !((1 << 8) <= c) {
10465			break
10466		}
10467		v.reset(OpConstBool)
10468		v.AuxInt = boolToAuxInt(true)
10469		return true
10470	}
10471	// match: (IsInBounds (ZeroExt8to64 _) (Const64 [c]))
10472	// cond: (1 << 8) <= c
10473	// result: (ConstBool [true])
10474	for {
10475		if v_0.Op != OpZeroExt8to64 || v_1.Op != OpConst64 {
10476			break
10477		}
10478		c := auxIntToInt64(v_1.AuxInt)
10479		if !((1 << 8) <= c) {
10480			break
10481		}
10482		v.reset(OpConstBool)
10483		v.AuxInt = boolToAuxInt(true)
10484		return true
10485	}
10486	// match: (IsInBounds (ZeroExt16to32 _) (Const32 [c]))
10487	// cond: (1 << 16) <= c
10488	// result: (ConstBool [true])
10489	for {
10490		if v_0.Op != OpZeroExt16to32 || v_1.Op != OpConst32 {
10491			break
10492		}
10493		c := auxIntToInt32(v_1.AuxInt)
10494		if !((1 << 16) <= c) {
10495			break
10496		}
10497		v.reset(OpConstBool)
10498		v.AuxInt = boolToAuxInt(true)
10499		return true
10500	}
10501	// match: (IsInBounds (ZeroExt16to64 _) (Const64 [c]))
10502	// cond: (1 << 16) <= c
10503	// result: (ConstBool [true])
10504	for {
10505		if v_0.Op != OpZeroExt16to64 || v_1.Op != OpConst64 {
10506			break
10507		}
10508		c := auxIntToInt64(v_1.AuxInt)
10509		if !((1 << 16) <= c) {
10510			break
10511		}
10512		v.reset(OpConstBool)
10513		v.AuxInt = boolToAuxInt(true)
10514		return true
10515	}
10516	// match: (IsInBounds x x)
10517	// result: (ConstBool [false])
10518	for {
10519		x := v_0
10520		if x != v_1 {
10521			break
10522		}
10523		v.reset(OpConstBool)
10524		v.AuxInt = boolToAuxInt(false)
10525		return true
10526	}
10527	// match: (IsInBounds (And8 (Const8 [c]) _) (Const8 [d]))
10528	// cond: 0 <= c && c < d
10529	// result: (ConstBool [true])
10530	for {
10531		if v_0.Op != OpAnd8 {
10532			break
10533		}
10534		v_0_0 := v_0.Args[0]
10535		v_0_1 := v_0.Args[1]
10536		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
10537			if v_0_0.Op != OpConst8 {
10538				continue
10539			}
10540			c := auxIntToInt8(v_0_0.AuxInt)
10541			if v_1.Op != OpConst8 {
10542				continue
10543			}
10544			d := auxIntToInt8(v_1.AuxInt)
10545			if !(0 <= c && c < d) {
10546				continue
10547			}
10548			v.reset(OpConstBool)
10549			v.AuxInt = boolToAuxInt(true)
10550			return true
10551		}
10552		break
10553	}
10554	// match: (IsInBounds (ZeroExt8to16 (And8 (Const8 [c]) _)) (Const16 [d]))
10555	// cond: 0 <= c && int16(c) < d
10556	// result: (ConstBool [true])
10557	for {
10558		if v_0.Op != OpZeroExt8to16 {
10559			break
10560		}
10561		v_0_0 := v_0.Args[0]
10562		if v_0_0.Op != OpAnd8 {
10563			break
10564		}
10565		v_0_0_0 := v_0_0.Args[0]
10566		v_0_0_1 := v_0_0.Args[1]
10567		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
10568			if v_0_0_0.Op != OpConst8 {
10569				continue
10570			}
10571			c := auxIntToInt8(v_0_0_0.AuxInt)
10572			if v_1.Op != OpConst16 {
10573				continue
10574			}
10575			d := auxIntToInt16(v_1.AuxInt)
10576			if !(0 <= c && int16(c) < d) {
10577				continue
10578			}
10579			v.reset(OpConstBool)
10580			v.AuxInt = boolToAuxInt(true)
10581			return true
10582		}
10583		break
10584	}
10585	// match: (IsInBounds (ZeroExt8to32 (And8 (Const8 [c]) _)) (Const32 [d]))
10586	// cond: 0 <= c && int32(c) < d
10587	// result: (ConstBool [true])
10588	for {
10589		if v_0.Op != OpZeroExt8to32 {
10590			break
10591		}
10592		v_0_0 := v_0.Args[0]
10593		if v_0_0.Op != OpAnd8 {
10594			break
10595		}
10596		v_0_0_0 := v_0_0.Args[0]
10597		v_0_0_1 := v_0_0.Args[1]
10598		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
10599			if v_0_0_0.Op != OpConst8 {
10600				continue
10601			}
10602			c := auxIntToInt8(v_0_0_0.AuxInt)
10603			if v_1.Op != OpConst32 {
10604				continue
10605			}
10606			d := auxIntToInt32(v_1.AuxInt)
10607			if !(0 <= c && int32(c) < d) {
10608				continue
10609			}
10610			v.reset(OpConstBool)
10611			v.AuxInt = boolToAuxInt(true)
10612			return true
10613		}
10614		break
10615	}
10616	// match: (IsInBounds (ZeroExt8to64 (And8 (Const8 [c]) _)) (Const64 [d]))
10617	// cond: 0 <= c && int64(c) < d
10618	// result: (ConstBool [true])
10619	for {
10620		if v_0.Op != OpZeroExt8to64 {
10621			break
10622		}
10623		v_0_0 := v_0.Args[0]
10624		if v_0_0.Op != OpAnd8 {
10625			break
10626		}
10627		v_0_0_0 := v_0_0.Args[0]
10628		v_0_0_1 := v_0_0.Args[1]
10629		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
10630			if v_0_0_0.Op != OpConst8 {
10631				continue
10632			}
10633			c := auxIntToInt8(v_0_0_0.AuxInt)
10634			if v_1.Op != OpConst64 {
10635				continue
10636			}
10637			d := auxIntToInt64(v_1.AuxInt)
10638			if !(0 <= c && int64(c) < d) {
10639				continue
10640			}
10641			v.reset(OpConstBool)
10642			v.AuxInt = boolToAuxInt(true)
10643			return true
10644		}
10645		break
10646	}
10647	// match: (IsInBounds (And16 (Const16 [c]) _) (Const16 [d]))
10648	// cond: 0 <= c && c < d
10649	// result: (ConstBool [true])
10650	for {
10651		if v_0.Op != OpAnd16 {
10652			break
10653		}
10654		v_0_0 := v_0.Args[0]
10655		v_0_1 := v_0.Args[1]
10656		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
10657			if v_0_0.Op != OpConst16 {
10658				continue
10659			}
10660			c := auxIntToInt16(v_0_0.AuxInt)
10661			if v_1.Op != OpConst16 {
10662				continue
10663			}
10664			d := auxIntToInt16(v_1.AuxInt)
10665			if !(0 <= c && c < d) {
10666				continue
10667			}
10668			v.reset(OpConstBool)
10669			v.AuxInt = boolToAuxInt(true)
10670			return true
10671		}
10672		break
10673	}
10674	// match: (IsInBounds (ZeroExt16to32 (And16 (Const16 [c]) _)) (Const32 [d]))
10675	// cond: 0 <= c && int32(c) < d
10676	// result: (ConstBool [true])
10677	for {
10678		if v_0.Op != OpZeroExt16to32 {
10679			break
10680		}
10681		v_0_0 := v_0.Args[0]
10682		if v_0_0.Op != OpAnd16 {
10683			break
10684		}
10685		v_0_0_0 := v_0_0.Args[0]
10686		v_0_0_1 := v_0_0.Args[1]
10687		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
10688			if v_0_0_0.Op != OpConst16 {
10689				continue
10690			}
10691			c := auxIntToInt16(v_0_0_0.AuxInt)
10692			if v_1.Op != OpConst32 {
10693				continue
10694			}
10695			d := auxIntToInt32(v_1.AuxInt)
10696			if !(0 <= c && int32(c) < d) {
10697				continue
10698			}
10699			v.reset(OpConstBool)
10700			v.AuxInt = boolToAuxInt(true)
10701			return true
10702		}
10703		break
10704	}
10705	// match: (IsInBounds (ZeroExt16to64 (And16 (Const16 [c]) _)) (Const64 [d]))
10706	// cond: 0 <= c && int64(c) < d
10707	// result: (ConstBool [true])
10708	for {
10709		if v_0.Op != OpZeroExt16to64 {
10710			break
10711		}
10712		v_0_0 := v_0.Args[0]
10713		if v_0_0.Op != OpAnd16 {
10714			break
10715		}
10716		v_0_0_0 := v_0_0.Args[0]
10717		v_0_0_1 := v_0_0.Args[1]
10718		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
10719			if v_0_0_0.Op != OpConst16 {
10720				continue
10721			}
10722			c := auxIntToInt16(v_0_0_0.AuxInt)
10723			if v_1.Op != OpConst64 {
10724				continue
10725			}
10726			d := auxIntToInt64(v_1.AuxInt)
10727			if !(0 <= c && int64(c) < d) {
10728				continue
10729			}
10730			v.reset(OpConstBool)
10731			v.AuxInt = boolToAuxInt(true)
10732			return true
10733		}
10734		break
10735	}
10736	// match: (IsInBounds (And32 (Const32 [c]) _) (Const32 [d]))
10737	// cond: 0 <= c && c < d
10738	// result: (ConstBool [true])
10739	for {
10740		if v_0.Op != OpAnd32 {
10741			break
10742		}
10743		v_0_0 := v_0.Args[0]
10744		v_0_1 := v_0.Args[1]
10745		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
10746			if v_0_0.Op != OpConst32 {
10747				continue
10748			}
10749			c := auxIntToInt32(v_0_0.AuxInt)
10750			if v_1.Op != OpConst32 {
10751				continue
10752			}
10753			d := auxIntToInt32(v_1.AuxInt)
10754			if !(0 <= c && c < d) {
10755				continue
10756			}
10757			v.reset(OpConstBool)
10758			v.AuxInt = boolToAuxInt(true)
10759			return true
10760		}
10761		break
10762	}
10763	// match: (IsInBounds (ZeroExt32to64 (And32 (Const32 [c]) _)) (Const64 [d]))
10764	// cond: 0 <= c && int64(c) < d
10765	// result: (ConstBool [true])
10766	for {
10767		if v_0.Op != OpZeroExt32to64 {
10768			break
10769		}
10770		v_0_0 := v_0.Args[0]
10771		if v_0_0.Op != OpAnd32 {
10772			break
10773		}
10774		v_0_0_0 := v_0_0.Args[0]
10775		v_0_0_1 := v_0_0.Args[1]
10776		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
10777			if v_0_0_0.Op != OpConst32 {
10778				continue
10779			}
10780			c := auxIntToInt32(v_0_0_0.AuxInt)
10781			if v_1.Op != OpConst64 {
10782				continue
10783			}
10784			d := auxIntToInt64(v_1.AuxInt)
10785			if !(0 <= c && int64(c) < d) {
10786				continue
10787			}
10788			v.reset(OpConstBool)
10789			v.AuxInt = boolToAuxInt(true)
10790			return true
10791		}
10792		break
10793	}
10794	// match: (IsInBounds (And64 (Const64 [c]) _) (Const64 [d]))
10795	// cond: 0 <= c && c < d
10796	// result: (ConstBool [true])
10797	for {
10798		if v_0.Op != OpAnd64 {
10799			break
10800		}
10801		v_0_0 := v_0.Args[0]
10802		v_0_1 := v_0.Args[1]
10803		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
10804			if v_0_0.Op != OpConst64 {
10805				continue
10806			}
10807			c := auxIntToInt64(v_0_0.AuxInt)
10808			if v_1.Op != OpConst64 {
10809				continue
10810			}
10811			d := auxIntToInt64(v_1.AuxInt)
10812			if !(0 <= c && c < d) {
10813				continue
10814			}
10815			v.reset(OpConstBool)
10816			v.AuxInt = boolToAuxInt(true)
10817			return true
10818		}
10819		break
10820	}
10821	// match: (IsInBounds (Const32 [c]) (Const32 [d]))
10822	// result: (ConstBool [0 <= c && c < d])
10823	for {
10824		if v_0.Op != OpConst32 {
10825			break
10826		}
10827		c := auxIntToInt32(v_0.AuxInt)
10828		if v_1.Op != OpConst32 {
10829			break
10830		}
10831		d := auxIntToInt32(v_1.AuxInt)
10832		v.reset(OpConstBool)
10833		v.AuxInt = boolToAuxInt(0 <= c && c < d)
10834		return true
10835	}
10836	// match: (IsInBounds (Const64 [c]) (Const64 [d]))
10837	// result: (ConstBool [0 <= c && c < d])
10838	for {
10839		if v_0.Op != OpConst64 {
10840			break
10841		}
10842		c := auxIntToInt64(v_0.AuxInt)
10843		if v_1.Op != OpConst64 {
10844			break
10845		}
10846		d := auxIntToInt64(v_1.AuxInt)
10847		v.reset(OpConstBool)
10848		v.AuxInt = boolToAuxInt(0 <= c && c < d)
10849		return true
10850	}
10851	// match: (IsInBounds (Mod32u _ y) y)
10852	// result: (ConstBool [true])
10853	for {
10854		if v_0.Op != OpMod32u {
10855			break
10856		}
10857		y := v_0.Args[1]
10858		if y != v_1 {
10859			break
10860		}
10861		v.reset(OpConstBool)
10862		v.AuxInt = boolToAuxInt(true)
10863		return true
10864	}
10865	// match: (IsInBounds (Mod64u _ y) y)
10866	// result: (ConstBool [true])
10867	for {
10868		if v_0.Op != OpMod64u {
10869			break
10870		}
10871		y := v_0.Args[1]
10872		if y != v_1 {
10873			break
10874		}
10875		v.reset(OpConstBool)
10876		v.AuxInt = boolToAuxInt(true)
10877		return true
10878	}
10879	// match: (IsInBounds (ZeroExt8to64 (Rsh8Ux64 _ (Const64 [c]))) (Const64 [d]))
10880	// cond: 0 < c && c < 8 && 1<<uint( 8-c)-1 < d
10881	// result: (ConstBool [true])
10882	for {
10883		if v_0.Op != OpZeroExt8to64 {
10884			break
10885		}
10886		v_0_0 := v_0.Args[0]
10887		if v_0_0.Op != OpRsh8Ux64 {
10888			break
10889		}
10890		_ = v_0_0.Args[1]
10891		v_0_0_1 := v_0_0.Args[1]
10892		if v_0_0_1.Op != OpConst64 {
10893			break
10894		}
10895		c := auxIntToInt64(v_0_0_1.AuxInt)
10896		if v_1.Op != OpConst64 {
10897			break
10898		}
10899		d := auxIntToInt64(v_1.AuxInt)
10900		if !(0 < c && c < 8 && 1<<uint(8-c)-1 < d) {
10901			break
10902		}
10903		v.reset(OpConstBool)
10904		v.AuxInt = boolToAuxInt(true)
10905		return true
10906	}
10907	// match: (IsInBounds (ZeroExt8to32 (Rsh8Ux64 _ (Const64 [c]))) (Const32 [d]))
10908	// cond: 0 < c && c < 8 && 1<<uint( 8-c)-1 < d
10909	// result: (ConstBool [true])
10910	for {
10911		if v_0.Op != OpZeroExt8to32 {
10912			break
10913		}
10914		v_0_0 := v_0.Args[0]
10915		if v_0_0.Op != OpRsh8Ux64 {
10916			break
10917		}
10918		_ = v_0_0.Args[1]
10919		v_0_0_1 := v_0_0.Args[1]
10920		if v_0_0_1.Op != OpConst64 {
10921			break
10922		}
10923		c := auxIntToInt64(v_0_0_1.AuxInt)
10924		if v_1.Op != OpConst32 {
10925			break
10926		}
10927		d := auxIntToInt32(v_1.AuxInt)
10928		if !(0 < c && c < 8 && 1<<uint(8-c)-1 < d) {
10929			break
10930		}
10931		v.reset(OpConstBool)
10932		v.AuxInt = boolToAuxInt(true)
10933		return true
10934	}
10935	// match: (IsInBounds (ZeroExt8to16 (Rsh8Ux64 _ (Const64 [c]))) (Const16 [d]))
10936	// cond: 0 < c && c < 8 && 1<<uint( 8-c)-1 < d
10937	// result: (ConstBool [true])
10938	for {
10939		if v_0.Op != OpZeroExt8to16 {
10940			break
10941		}
10942		v_0_0 := v_0.Args[0]
10943		if v_0_0.Op != OpRsh8Ux64 {
10944			break
10945		}
10946		_ = v_0_0.Args[1]
10947		v_0_0_1 := v_0_0.Args[1]
10948		if v_0_0_1.Op != OpConst64 {
10949			break
10950		}
10951		c := auxIntToInt64(v_0_0_1.AuxInt)
10952		if v_1.Op != OpConst16 {
10953			break
10954		}
10955		d := auxIntToInt16(v_1.AuxInt)
10956		if !(0 < c && c < 8 && 1<<uint(8-c)-1 < d) {
10957			break
10958		}
10959		v.reset(OpConstBool)
10960		v.AuxInt = boolToAuxInt(true)
10961		return true
10962	}
10963	// match: (IsInBounds (Rsh8Ux64 _ (Const64 [c])) (Const64 [d]))
10964	// cond: 0 < c && c < 8 && 1<<uint( 8-c)-1 < d
10965	// result: (ConstBool [true])
10966	for {
10967		if v_0.Op != OpRsh8Ux64 {
10968			break
10969		}
10970		_ = v_0.Args[1]
10971		v_0_1 := v_0.Args[1]
10972		if v_0_1.Op != OpConst64 {
10973			break
10974		}
10975		c := auxIntToInt64(v_0_1.AuxInt)
10976		if v_1.Op != OpConst64 {
10977			break
10978		}
10979		d := auxIntToInt64(v_1.AuxInt)
10980		if !(0 < c && c < 8 && 1<<uint(8-c)-1 < d) {
10981			break
10982		}
10983		v.reset(OpConstBool)
10984		v.AuxInt = boolToAuxInt(true)
10985		return true
10986	}
10987	// match: (IsInBounds (ZeroExt16to64 (Rsh16Ux64 _ (Const64 [c]))) (Const64 [d]))
10988	// cond: 0 < c && c < 16 && 1<<uint(16-c)-1 < d
10989	// result: (ConstBool [true])
10990	for {
10991		if v_0.Op != OpZeroExt16to64 {
10992			break
10993		}
10994		v_0_0 := v_0.Args[0]
10995		if v_0_0.Op != OpRsh16Ux64 {
10996			break
10997		}
10998		_ = v_0_0.Args[1]
10999		v_0_0_1 := v_0_0.Args[1]
11000		if v_0_0_1.Op != OpConst64 {
11001			break
11002		}
11003		c := auxIntToInt64(v_0_0_1.AuxInt)
11004		if v_1.Op != OpConst64 {
11005			break
11006		}
11007		d := auxIntToInt64(v_1.AuxInt)
11008		if !(0 < c && c < 16 && 1<<uint(16-c)-1 < d) {
11009			break
11010		}
11011		v.reset(OpConstBool)
11012		v.AuxInt = boolToAuxInt(true)
11013		return true
11014	}
11015	// match: (IsInBounds (ZeroExt16to32 (Rsh16Ux64 _ (Const64 [c]))) (Const64 [d]))
11016	// cond: 0 < c && c < 16 && 1<<uint(16-c)-1 < d
11017	// result: (ConstBool [true])
11018	for {
11019		if v_0.Op != OpZeroExt16to32 {
11020			break
11021		}
11022		v_0_0 := v_0.Args[0]
11023		if v_0_0.Op != OpRsh16Ux64 {
11024			break
11025		}
11026		_ = v_0_0.Args[1]
11027		v_0_0_1 := v_0_0.Args[1]
11028		if v_0_0_1.Op != OpConst64 {
11029			break
11030		}
11031		c := auxIntToInt64(v_0_0_1.AuxInt)
11032		if v_1.Op != OpConst64 {
11033			break
11034		}
11035		d := auxIntToInt64(v_1.AuxInt)
11036		if !(0 < c && c < 16 && 1<<uint(16-c)-1 < d) {
11037			break
11038		}
11039		v.reset(OpConstBool)
11040		v.AuxInt = boolToAuxInt(true)
11041		return true
11042	}
11043	// match: (IsInBounds (Rsh16Ux64 _ (Const64 [c])) (Const64 [d]))
11044	// cond: 0 < c && c < 16 && 1<<uint(16-c)-1 < d
11045	// result: (ConstBool [true])
11046	for {
11047		if v_0.Op != OpRsh16Ux64 {
11048			break
11049		}
11050		_ = v_0.Args[1]
11051		v_0_1 := v_0.Args[1]
11052		if v_0_1.Op != OpConst64 {
11053			break
11054		}
11055		c := auxIntToInt64(v_0_1.AuxInt)
11056		if v_1.Op != OpConst64 {
11057			break
11058		}
11059		d := auxIntToInt64(v_1.AuxInt)
11060		if !(0 < c && c < 16 && 1<<uint(16-c)-1 < d) {
11061			break
11062		}
11063		v.reset(OpConstBool)
11064		v.AuxInt = boolToAuxInt(true)
11065		return true
11066	}
11067	// match: (IsInBounds (ZeroExt32to64 (Rsh32Ux64 _ (Const64 [c]))) (Const64 [d]))
11068	// cond: 0 < c && c < 32 && 1<<uint(32-c)-1 < d
11069	// result: (ConstBool [true])
11070	for {
11071		if v_0.Op != OpZeroExt32to64 {
11072			break
11073		}
11074		v_0_0 := v_0.Args[0]
11075		if v_0_0.Op != OpRsh32Ux64 {
11076			break
11077		}
11078		_ = v_0_0.Args[1]
11079		v_0_0_1 := v_0_0.Args[1]
11080		if v_0_0_1.Op != OpConst64 {
11081			break
11082		}
11083		c := auxIntToInt64(v_0_0_1.AuxInt)
11084		if v_1.Op != OpConst64 {
11085			break
11086		}
11087		d := auxIntToInt64(v_1.AuxInt)
11088		if !(0 < c && c < 32 && 1<<uint(32-c)-1 < d) {
11089			break
11090		}
11091		v.reset(OpConstBool)
11092		v.AuxInt = boolToAuxInt(true)
11093		return true
11094	}
11095	// match: (IsInBounds (Rsh32Ux64 _ (Const64 [c])) (Const64 [d]))
11096	// cond: 0 < c && c < 32 && 1<<uint(32-c)-1 < d
11097	// result: (ConstBool [true])
11098	for {
11099		if v_0.Op != OpRsh32Ux64 {
11100			break
11101		}
11102		_ = v_0.Args[1]
11103		v_0_1 := v_0.Args[1]
11104		if v_0_1.Op != OpConst64 {
11105			break
11106		}
11107		c := auxIntToInt64(v_0_1.AuxInt)
11108		if v_1.Op != OpConst64 {
11109			break
11110		}
11111		d := auxIntToInt64(v_1.AuxInt)
11112		if !(0 < c && c < 32 && 1<<uint(32-c)-1 < d) {
11113			break
11114		}
11115		v.reset(OpConstBool)
11116		v.AuxInt = boolToAuxInt(true)
11117		return true
11118	}
11119	// match: (IsInBounds (Rsh64Ux64 _ (Const64 [c])) (Const64 [d]))
11120	// cond: 0 < c && c < 64 && 1<<uint(64-c)-1 < d
11121	// result: (ConstBool [true])
11122	for {
11123		if v_0.Op != OpRsh64Ux64 {
11124			break
11125		}
11126		_ = v_0.Args[1]
11127		v_0_1 := v_0.Args[1]
11128		if v_0_1.Op != OpConst64 {
11129			break
11130		}
11131		c := auxIntToInt64(v_0_1.AuxInt)
11132		if v_1.Op != OpConst64 {
11133			break
11134		}
11135		d := auxIntToInt64(v_1.AuxInt)
11136		if !(0 < c && c < 64 && 1<<uint(64-c)-1 < d) {
11137			break
11138		}
11139		v.reset(OpConstBool)
11140		v.AuxInt = boolToAuxInt(true)
11141		return true
11142	}
11143	return false
11144}
// rewriteValuegeneric_OpIsNonNil rewrites OpIsNonNil values whose operand is
// statically known to be nil or non-nil, replacing v with a ConstBool.
// It reports whether a rewrite was applied. Each "for { ... break }" below is
// a single-shot attempt at one rule from _gen/generic.rules, tried in order.
func rewriteValuegeneric_OpIsNonNil(v *Value) bool {
	v_0 := v.Args[0] // the pointer operand being tested
	// match: (IsNonNil (ConstNil))
	// result: (ConstBool [false])
	for {
		if v_0.Op != OpConstNil {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (IsNonNil (Const32 [c]))
	// result: (ConstBool [c != 0])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(c != 0)
		return true
	}
	// match: (IsNonNil (Const64 [c]))
	// result: (ConstBool [c != 0])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(c != 0)
		return true
	}
	// match: (IsNonNil (Addr _) )
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpAddr {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (IsNonNil (Convert (Addr _) _))
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConvert {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpAddr {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (IsNonNil (LocalAddr _ _))
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpLocalAddr {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// No rule matched; leave v unchanged.
	return false
}
// rewriteValuegeneric_OpIsSliceInBounds rewrites OpIsSliceInBounds values
// (the 0 <= i <= bound check used for slicing) that can be decided
// statically, replacing v with a ConstBool. It reports whether a rewrite was
// applied. Each "for { ... break }" below is a single-shot attempt at one
// rule from _gen/generic.rules, tried in order.
func rewriteValuegeneric_OpIsSliceInBounds(v *Value) bool {
	v_1 := v.Args[1] // the bound (len or cap)
	v_0 := v.Args[0] // the index being checked
	// match: (IsSliceInBounds x x)
	// result: (ConstBool [true])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (IsSliceInBounds (And32 (Const32 [c]) _) (Const32 [d]))
	// cond: 0 <= c && c <= d
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpAnd32 {
			break
		}
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// And32 is commutative: try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_0_0.AuxInt)
			if v_1.Op != OpConst32 {
				continue
			}
			d := auxIntToInt32(v_1.AuxInt)
			if !(0 <= c && c <= d) {
				continue
			}
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(true)
			return true
		}
		break
	}
	// match: (IsSliceInBounds (And64 (Const64 [c]) _) (Const64 [d]))
	// cond: 0 <= c && c <= d
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpAnd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// And64 is commutative: try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_0_0.AuxInt)
			if v_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			if !(0 <= c && c <= d) {
				continue
			}
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(true)
			return true
		}
		break
	}
	// match: (IsSliceInBounds (Const32 [0]) _)
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (IsSliceInBounds (Const64 [0]) _)
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (IsSliceInBounds (Const32 [c]) (Const32 [d]))
	// result: (ConstBool [0 <= c && c <= d])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		if v_1.Op != OpConst32 {
			break
		}
		d := auxIntToInt32(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(0 <= c && c <= d)
		return true
	}
	// match: (IsSliceInBounds (Const64 [c]) (Const64 [d]))
	// result: (ConstBool [0 <= c && c <= d])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(0 <= c && c <= d)
		return true
	}
	// match: (IsSliceInBounds (SliceLen x) (SliceCap x))
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpSliceLen {
			break
		}
		x := v_0.Args[0]
		if v_1.Op != OpSliceCap || x != v_1.Args[0] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// No rule matched; leave v unchanged.
	return false
}
// rewriteValuegeneric_OpLeq16 rewrites signed 16-bit less-or-equal (OpLeq16)
// values: constant-folds comparisons of two constants, and canonicalizes
// comparisons against boundary constants (-1, 1, MinInt16, MaxInt16) into
// Less16, Eq16, or a ConstBool. It reports whether a rewrite was applied.
// Each "for { ... break }" below is a single-shot attempt at one rule from
// _gen/generic.rules, tried in order.
func rewriteValuegeneric_OpLeq16(v *Value) bool {
	v_1 := v.Args[1] // right-hand operand
	v_0 := v.Args[0] // left-hand operand
	b := v.Block
	// match: (Leq16 (Const16 [c]) (Const16 [d]))
	// result: (ConstBool [c <= d])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if v_1.Op != OpConst16 {
			break
		}
		d := auxIntToInt16(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(c <= d)
		return true
	}
	// match: (Leq16 (Const16 [0]) (And16 _ (Const16 [c])))
	// cond: c >= 0
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 || v_1.Op != OpAnd16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		// And16 is commutative: try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			if v_1_1.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_1_1.AuxInt)
			if !(c >= 0) {
				continue
			}
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(true)
			return true
		}
		break
	}
	// match: (Leq16 (Const16 [0]) (Rsh16Ux64 _ (Const64 [c])))
	// cond: c > 0
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 || v_1.Op != OpRsh16Ux64 {
			break
		}
		_ = v_1.Args[1]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1_1.AuxInt)
		if !(c > 0) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq16 x (Const16 <t> [-1]))
	// result: (Less16 x (Const16 <t> [0]))
	for {
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		if auxIntToInt16(v_1.AuxInt) != -1 {
			break
		}
		v.reset(OpLess16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Leq16 (Const16 <t> [1]) x)
	// result: (Less16 (Const16 <t> [0]) x)
	for {
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		if auxIntToInt16(v_0.AuxInt) != 1 {
			break
		}
		x := v_1
		v.reset(OpLess16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Leq16 (Const16 [math.MinInt16]) _)
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != math.MinInt16 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq16 _ (Const16 [math.MaxInt16]))
	// result: (ConstBool [true])
	for {
		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != math.MaxInt16 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq16 x c:(Const16 [math.MinInt16]))
	// result: (Eq16 x c)
	for {
		x := v_0
		c := v_1
		if c.Op != OpConst16 || auxIntToInt16(c.AuxInt) != math.MinInt16 {
			break
		}
		v.reset(OpEq16)
		v.AddArg2(x, c)
		return true
	}
	// match: (Leq16 c:(Const16 [math.MaxInt16]) x)
	// result: (Eq16 x c)
	for {
		c := v_0
		if c.Op != OpConst16 || auxIntToInt16(c.AuxInt) != math.MaxInt16 {
			break
		}
		x := v_1
		v.reset(OpEq16)
		v.AddArg2(x, c)
		return true
	}
	// No rule matched; leave v unchanged.
	return false
}
// rewriteValuegeneric_OpLeq16U rewrites unsigned 16-bit less-or-equal
// (OpLeq16U) values: constant-folds comparisons of two constants and
// canonicalizes comparisons against the unsigned boundary constants
// (0, 1, and -1 == MaxUint16) into Neq16, Eq16, or a ConstBool.
// It reports whether a rewrite was applied. Each "for { ... break }" below
// is a single-shot attempt at one rule from _gen/generic.rules, tried in order.
func rewriteValuegeneric_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1] // right-hand operand
	v_0 := v.Args[0] // left-hand operand
	b := v.Block
	// match: (Leq16U (Const16 [c]) (Const16 [d]))
	// result: (ConstBool [uint16(c) <= uint16(d)])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if v_1.Op != OpConst16 {
			break
		}
		d := auxIntToInt16(v_1.AuxInt)
		v.reset(OpConstBool)
		// AuxInt stores the bits as int16; reinterpret as uint16 to compare.
		v.AuxInt = boolToAuxInt(uint16(c) <= uint16(d))
		return true
	}
	// match: (Leq16U (Const16 <t> [1]) x)
	// result: (Neq16 (Const16 <t> [0]) x)
	for {
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		if auxIntToInt16(v_0.AuxInt) != 1 {
			break
		}
		x := v_1
		v.reset(OpNeq16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Leq16U (Const16 [0]) _)
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq16U _ (Const16 [-1]))
	// result: (ConstBool [true])
	for {
		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != -1 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq16U x c:(Const16 [0]))
	// result: (Eq16 x c)
	for {
		x := v_0
		c := v_1
		if c.Op != OpConst16 || auxIntToInt16(c.AuxInt) != 0 {
			break
		}
		v.reset(OpEq16)
		v.AddArg2(x, c)
		return true
	}
	// match: (Leq16U c:(Const16 [-1]) x)
	// result: (Eq16 x c)
	for {
		c := v_0
		if c.Op != OpConst16 || auxIntToInt16(c.AuxInt) != -1 {
			break
		}
		x := v_1
		v.reset(OpEq16)
		v.AddArg2(x, c)
		return true
	}
	// No rule matched; leave v unchanged.
	return false
}
// rewriteValuegeneric_OpLeq32 rewrites signed 32-bit less-or-equal (OpLeq32)
// values: constant-folds comparisons of two constants, and canonicalizes
// comparisons against boundary constants (-1, 1, MinInt32, MaxInt32) into
// Less32, Eq32, or a ConstBool. It reports whether a rewrite was applied.
// Each "for { ... break }" below is a single-shot attempt at one rule from
// _gen/generic.rules, tried in order.
func rewriteValuegeneric_OpLeq32(v *Value) bool {
	v_1 := v.Args[1] // right-hand operand
	v_0 := v.Args[0] // left-hand operand
	b := v.Block
	// match: (Leq32 (Const32 [c]) (Const32 [d]))
	// result: (ConstBool [c <= d])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		if v_1.Op != OpConst32 {
			break
		}
		d := auxIntToInt32(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(c <= d)
		return true
	}
	// match: (Leq32 (Const32 [0]) (And32 _ (Const32 [c])))
	// cond: c >= 0
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 || v_1.Op != OpAnd32 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		// And32 is commutative: try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			if v_1_1.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_1_1.AuxInt)
			if !(c >= 0) {
				continue
			}
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(true)
			return true
		}
		break
	}
	// match: (Leq32 (Const32 [0]) (Rsh32Ux64 _ (Const64 [c])))
	// cond: c > 0
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 || v_1.Op != OpRsh32Ux64 {
			break
		}
		_ = v_1.Args[1]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1_1.AuxInt)
		if !(c > 0) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq32 x (Const32 <t> [-1]))
	// result: (Less32 x (Const32 <t> [0]))
	for {
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		if auxIntToInt32(v_1.AuxInt) != -1 {
			break
		}
		v.reset(OpLess32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Leq32 (Const32 <t> [1]) x)
	// result: (Less32 (Const32 <t> [0]) x)
	for {
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		if auxIntToInt32(v_0.AuxInt) != 1 {
			break
		}
		x := v_1
		v.reset(OpLess32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Leq32 (Const32 [math.MinInt32]) _)
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != math.MinInt32 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq32 _ (Const32 [math.MaxInt32]))
	// result: (ConstBool [true])
	for {
		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != math.MaxInt32 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq32 x c:(Const32 [math.MinInt32]))
	// result: (Eq32 x c)
	for {
		x := v_0
		c := v_1
		if c.Op != OpConst32 || auxIntToInt32(c.AuxInt) != math.MinInt32 {
			break
		}
		v.reset(OpEq32)
		v.AddArg2(x, c)
		return true
	}
	// match: (Leq32 c:(Const32 [math.MaxInt32]) x)
	// result: (Eq32 x c)
	for {
		c := v_0
		if c.Op != OpConst32 || auxIntToInt32(c.AuxInt) != math.MaxInt32 {
			break
		}
		x := v_1
		v.reset(OpEq32)
		v.AddArg2(x, c)
		return true
	}
	// No rule matched; leave v unchanged.
	return false
}
// rewriteValuegeneric_OpLeq32F constant-folds a 32-bit float less-or-equal
// (OpLeq32F) of two float constants into a ConstBool. It reports whether a
// rewrite was applied.
func rewriteValuegeneric_OpLeq32F(v *Value) bool {
	v_1 := v.Args[1] // right-hand operand
	v_0 := v.Args[0] // left-hand operand
	// match: (Leq32F (Const32F [c]) (Const32F [d]))
	// result: (ConstBool [c <= d])
	for {
		if v_0.Op != OpConst32F {
			break
		}
		c := auxIntToFloat32(v_0.AuxInt)
		if v_1.Op != OpConst32F {
			break
		}
		d := auxIntToFloat32(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(c <= d)
		return true
	}
	// No rule matched; leave v unchanged.
	return false
}
// rewriteValuegeneric_OpLeq32U rewrites unsigned 32-bit less-or-equal
// (OpLeq32U) values: constant-folds comparisons of two constants and
// canonicalizes comparisons against the unsigned boundary constants
// (0, 1, and -1 == MaxUint32) into Neq32, Eq32, or a ConstBool.
// It reports whether a rewrite was applied. Each "for { ... break }" below
// is a single-shot attempt at one rule from _gen/generic.rules, tried in order.
func rewriteValuegeneric_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1] // right-hand operand
	v_0 := v.Args[0] // left-hand operand
	b := v.Block
	// match: (Leq32U (Const32 [c]) (Const32 [d]))
	// result: (ConstBool [uint32(c) <= uint32(d)])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		if v_1.Op != OpConst32 {
			break
		}
		d := auxIntToInt32(v_1.AuxInt)
		v.reset(OpConstBool)
		// AuxInt stores the bits as int32; reinterpret as uint32 to compare.
		v.AuxInt = boolToAuxInt(uint32(c) <= uint32(d))
		return true
	}
	// match: (Leq32U (Const32 <t> [1]) x)
	// result: (Neq32 (Const32 <t> [0]) x)
	for {
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		if auxIntToInt32(v_0.AuxInt) != 1 {
			break
		}
		x := v_1
		v.reset(OpNeq32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Leq32U (Const32 [0]) _)
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq32U _ (Const32 [-1]))
	// result: (ConstBool [true])
	for {
		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != -1 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq32U x c:(Const32 [0]))
	// result: (Eq32 x c)
	for {
		x := v_0
		c := v_1
		if c.Op != OpConst32 || auxIntToInt32(c.AuxInt) != 0 {
			break
		}
		v.reset(OpEq32)
		v.AddArg2(x, c)
		return true
	}
	// match: (Leq32U c:(Const32 [-1]) x)
	// result: (Eq32 x c)
	for {
		c := v_0
		if c.Op != OpConst32 || auxIntToInt32(c.AuxInt) != -1 {
			break
		}
		x := v_1
		v.reset(OpEq32)
		v.AddArg2(x, c)
		return true
	}
	// No rule matched; leave v unchanged.
	return false
}
// rewriteValuegeneric_OpLeq64 rewrites signed 64-bit less-or-equal (OpLeq64)
// values: constant-folds comparisons of two constants, and canonicalizes
// comparisons against boundary constants (-1, 1, MinInt64, MaxInt64) into
// Less64, Eq64, or a ConstBool. It reports whether a rewrite was applied.
// Each "for { ... break }" below is a single-shot attempt at one rule from
// _gen/generic.rules, tried in order.
func rewriteValuegeneric_OpLeq64(v *Value) bool {
	v_1 := v.Args[1] // right-hand operand
	v_0 := v.Args[0] // left-hand operand
	b := v.Block
	// match: (Leq64 (Const64 [c]) (Const64 [d]))
	// result: (ConstBool [c <= d])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(c <= d)
		return true
	}
	// match: (Leq64 (Const64 [0]) (And64 _ (Const64 [c])))
	// cond: c >= 0
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 || v_1.Op != OpAnd64 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		// And64 is commutative: try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= 0) {
				continue
			}
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(true)
			return true
		}
		break
	}
	// match: (Leq64 (Const64 [0]) (Rsh64Ux64 _ (Const64 [c])))
	// cond: c > 0
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 || v_1.Op != OpRsh64Ux64 {
			break
		}
		_ = v_1.Args[1]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1_1.AuxInt)
		if !(c > 0) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq64 x (Const64 <t> [-1]))
	// result: (Less64 x (Const64 <t> [0]))
	for {
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		if auxIntToInt64(v_1.AuxInt) != -1 {
			break
		}
		v.reset(OpLess64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Leq64 (Const64 <t> [1]) x)
	// result: (Less64 (Const64 <t> [0]) x)
	for {
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		if auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		x := v_1
		v.reset(OpLess64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Leq64 (Const64 [math.MinInt64]) _)
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != math.MinInt64 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq64 _ (Const64 [math.MaxInt64]))
	// result: (ConstBool [true])
	for {
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != math.MaxInt64 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq64 x c:(Const64 [math.MinInt64]))
	// result: (Eq64 x c)
	for {
		x := v_0
		c := v_1
		if c.Op != OpConst64 || auxIntToInt64(c.AuxInt) != math.MinInt64 {
			break
		}
		v.reset(OpEq64)
		v.AddArg2(x, c)
		return true
	}
	// match: (Leq64 c:(Const64 [math.MaxInt64]) x)
	// result: (Eq64 x c)
	for {
		c := v_0
		if c.Op != OpConst64 || auxIntToInt64(c.AuxInt) != math.MaxInt64 {
			break
		}
		x := v_1
		v.reset(OpEq64)
		v.AddArg2(x, c)
		return true
	}
	// No rule matched; leave v unchanged.
	return false
}
11962func rewriteValuegeneric_OpLeq64F(v *Value) bool {
11963	v_1 := v.Args[1]
11964	v_0 := v.Args[0]
11965	// match: (Leq64F (Const64F [c]) (Const64F [d]))
11966	// result: (ConstBool [c <= d])
11967	for {
11968		if v_0.Op != OpConst64F {
11969			break
11970		}
11971		c := auxIntToFloat64(v_0.AuxInt)
11972		if v_1.Op != OpConst64F {
11973			break
11974		}
11975		d := auxIntToFloat64(v_1.AuxInt)
11976		v.reset(OpConstBool)
11977		v.AuxInt = boolToAuxInt(c <= d)
11978		return true
11979	}
11980	return false
11981}
// rewriteValuegeneric_OpLeq64U rewrites unsigned 64-bit <= comparisons.
// Each "for" below is one rule; rules are tried in source order and the
// first match rewrites v in place and returns true. Returns false when
// no rule applies.
func rewriteValuegeneric_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq64U (Const64 [c]) (Const64 [d]))
	// result: (ConstBool [uint64(c) <= uint64(d)])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(uint64(c) <= uint64(d))
		return true
	}
	// match: (Leq64U (Const64 <t> [1]) x)
	// result: (Neq64 (Const64 <t> [0]) x)
	for {
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		if auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		x := v_1
		v.reset(OpNeq64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Leq64U (Const64 [0]) _)
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq64U _ (Const64 [-1]))
	// result: (ConstBool [true])
	// AuxInt -1 is all ones, i.e. math.MaxUint64 when read unsigned.
	for {
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq64U x c:(Const64 [0]))
	// result: (Eq64 x c)
	for {
		x := v_0
		c := v_1
		if c.Op != OpConst64 || auxIntToInt64(c.AuxInt) != 0 {
			break
		}
		v.reset(OpEq64)
		v.AddArg2(x, c)
		return true
	}
	// match: (Leq64U c:(Const64 [-1]) x)
	// result: (Eq64 x c)
	for {
		c := v_0
		if c.Op != OpConst64 || auxIntToInt64(c.AuxInt) != -1 {
			break
		}
		x := v_1
		v.reset(OpEq64)
		v.AddArg2(x, c)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLeq8 rewrites signed 8-bit <= comparisons.
// Each "for" below is one rule; rules are tried in source order and the
// first match rewrites v in place and returns true. Returns false when
// no rule applies.
func rewriteValuegeneric_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq8 (Const8 [c]) (Const8 [d]))
	// result: (ConstBool [c <= d])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		if v_1.Op != OpConst8 {
			break
		}
		d := auxIntToInt8(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(c <= d)
		return true
	}
	// match: (Leq8 (Const8 [0]) (And8 _ (Const8 [c])))
	// cond: c >= 0
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 || v_1.Op != OpAnd8 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		// And8 is commutative: this loop tries both operand orders.
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			if v_1_1.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_1_1.AuxInt)
			if !(c >= 0) {
				continue
			}
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(true)
			return true
		}
		break
	}
	// match: (Leq8 (Const8 [0]) (Rsh8Ux64 _ (Const64 [c])))
	// cond: c > 0
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 || v_1.Op != OpRsh8Ux64 {
			break
		}
		_ = v_1.Args[1]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1_1.AuxInt)
		if !(c > 0) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq8 x (Const8 <t> [-1]))
	// result: (Less8 x (Const8 <t> [0]))
	for {
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		if auxIntToInt8(v_1.AuxInt) != -1 {
			break
		}
		v.reset(OpLess8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Leq8 (Const8 <t> [1]) x)
	// result: (Less8 (Const8 <t> [0]) x)
	for {
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		if auxIntToInt8(v_0.AuxInt) != 1 {
			break
		}
		x := v_1
		v.reset(OpLess8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Leq8 (Const8 [math.MinInt8 ]) _)
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != math.MinInt8 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq8 _ (Const8 [math.MaxInt8 ]))
	// result: (ConstBool [true])
	for {
		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != math.MaxInt8 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq8 x c:(Const8 [math.MinInt8 ]))
	// result: (Eq8 x c)
	for {
		x := v_0
		c := v_1
		if c.Op != OpConst8 || auxIntToInt8(c.AuxInt) != math.MinInt8 {
			break
		}
		v.reset(OpEq8)
		v.AddArg2(x, c)
		return true
	}
	// match: (Leq8 c:(Const8 [math.MaxInt8 ]) x)
	// result: (Eq8 x c)
	for {
		c := v_0
		if c.Op != OpConst8 || auxIntToInt8(c.AuxInt) != math.MaxInt8 {
			break
		}
		x := v_1
		v.reset(OpEq8)
		v.AddArg2(x, c)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLeq8U rewrites unsigned 8-bit <= comparisons.
// Each "for" below is one rule; rules are tried in source order and the
// first match rewrites v in place and returns true. Returns false when
// no rule applies.
func rewriteValuegeneric_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq8U (Const8 [c]) (Const8 [d]))
	// result: (ConstBool [ uint8(c) <= uint8(d)])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		if v_1.Op != OpConst8 {
			break
		}
		d := auxIntToInt8(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(uint8(c) <= uint8(d))
		return true
	}
	// match: (Leq8U (Const8 <t> [1]) x)
	// result: (Neq8 (Const8 <t> [0]) x)
	for {
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		if auxIntToInt8(v_0.AuxInt) != 1 {
			break
		}
		x := v_1
		v.reset(OpNeq8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Leq8U (Const8 [0]) _)
	// result: (ConstBool [true])
	for {
		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq8U _ (Const8 [-1]))
	// result: (ConstBool [true])
	// AuxInt -1 is all ones, i.e. math.MaxUint8 when read unsigned.
	for {
		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != -1 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(true)
		return true
	}
	// match: (Leq8U x c:(Const8 [0]))
	// result: (Eq8 x c)
	for {
		x := v_0
		c := v_1
		if c.Op != OpConst8 || auxIntToInt8(c.AuxInt) != 0 {
			break
		}
		v.reset(OpEq8)
		v.AddArg2(x, c)
		return true
	}
	// match: (Leq8U c:(Const8 [-1]) x)
	// result: (Eq8 x c)
	for {
		c := v_0
		if c.Op != OpConst8 || auxIntToInt8(c.AuxInt) != -1 {
			break
		}
		x := v_1
		v.reset(OpEq8)
		v.AddArg2(x, c)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLess16 rewrites signed 16-bit < comparisons.
// Each "for" below is one rule; rules are tried in source order and the
// first match rewrites v in place and returns true. Returns false when
// no rule applies.
func rewriteValuegeneric_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less16 (Const16 [c]) (Const16 [d]))
	// result: (ConstBool [c < d])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if v_1.Op != OpConst16 {
			break
		}
		d := auxIntToInt16(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(c < d)
		return true
	}
	// match: (Less16 (Const16 <t> [0]) x)
	// cond: isNonNegative(x)
	// result: (Neq16 (Const16 <t> [0]) x)
	for {
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		if auxIntToInt16(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		if !(isNonNegative(x)) {
			break
		}
		v.reset(OpNeq16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Less16 x (Const16 <t> [1]))
	// cond: isNonNegative(x)
	// result: (Eq16 (Const16 <t> [0]) x)
	for {
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		if auxIntToInt16(v_1.AuxInt) != 1 || !(isNonNegative(x)) {
			break
		}
		v.reset(OpEq16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Less16 x (Const16 <t> [1]))
	// result: (Leq16 x (Const16 <t> [0]))
	for {
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		if auxIntToInt16(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpLeq16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less16 (Const16 <t> [-1]) x)
	// result: (Leq16 (Const16 <t> [0]) x)
	for {
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		if auxIntToInt16(v_0.AuxInt) != -1 {
			break
		}
		x := v_1
		v.reset(OpLeq16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Less16 _ (Const16 [math.MinInt16]))
	// result: (ConstBool [false])
	for {
		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != math.MinInt16 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Less16 (Const16 [math.MaxInt16]) _)
	// result: (ConstBool [false])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != math.MaxInt16 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Less16 x (Const16 <t> [math.MinInt16+1]))
	// result: (Eq16 x (Const16 <t> [math.MinInt16]))
	for {
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		if auxIntToInt16(v_1.AuxInt) != math.MinInt16+1 {
			break
		}
		v.reset(OpEq16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(math.MinInt16)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less16 (Const16 <t> [math.MaxInt16-1]) x)
	// result: (Eq16 x (Const16 <t> [math.MaxInt16]))
	for {
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		if auxIntToInt16(v_0.AuxInt) != math.MaxInt16-1 {
			break
		}
		x := v_1
		v.reset(OpEq16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(math.MaxInt16)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLess16U rewrites unsigned 16-bit < comparisons.
// Each "for" below is one rule; rules are tried in source order and the
// first match rewrites v in place and returns true. Returns false when
// no rule applies.
func rewriteValuegeneric_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less16U (Const16 [c]) (Const16 [d]))
	// result: (ConstBool [uint16(c) < uint16(d)])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if v_1.Op != OpConst16 {
			break
		}
		d := auxIntToInt16(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(uint16(c) < uint16(d))
		return true
	}
	// match: (Less16U x (Const16 <t> [1]))
	// result: (Eq16 (Const16 <t> [0]) x)
	for {
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		if auxIntToInt16(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpEq16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Less16U _ (Const16 [0]))
	// result: (ConstBool [false])
	for {
		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Less16U (Const16 [-1]) _)
	// result: (ConstBool [false])
	// AuxInt -1 is all ones, i.e. math.MaxUint16 when read unsigned.
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Less16U x (Const16 <t> [1]))
	// result: (Eq16 x (Const16 <t> [0]))
	// NOTE(review): same pattern as the second rule above, which always
	// matches first; kept to mirror the generator's rule list.
	for {
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		if auxIntToInt16(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpEq16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less16U (Const16 <t> [-2]) x)
	// result: (Eq16 x (Const16 <t> [-1]))
	// Unsigned: MaxUint16-1 < x holds only when x == MaxUint16.
	for {
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		if auxIntToInt16(v_0.AuxInt) != -2 {
			break
		}
		x := v_1
		v.reset(OpEq16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(-1)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLess32 rewrites signed 32-bit < comparisons.
// Each "for" below is one rule; rules are tried in source order and the
// first match rewrites v in place and returns true. Returns false when
// no rule applies.
func rewriteValuegeneric_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less32 (Const32 [c]) (Const32 [d]))
	// result: (ConstBool [c < d])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		if v_1.Op != OpConst32 {
			break
		}
		d := auxIntToInt32(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(c < d)
		return true
	}
	// match: (Less32 (Const32 <t> [0]) x)
	// cond: isNonNegative(x)
	// result: (Neq32 (Const32 <t> [0]) x)
	for {
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		if auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		if !(isNonNegative(x)) {
			break
		}
		v.reset(OpNeq32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Less32 x (Const32 <t> [1]))
	// cond: isNonNegative(x)
	// result: (Eq32 (Const32 <t> [0]) x)
	for {
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		if auxIntToInt32(v_1.AuxInt) != 1 || !(isNonNegative(x)) {
			break
		}
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Less32 x (Const32 <t> [1]))
	// result: (Leq32 x (Const32 <t> [0]))
	for {
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		if auxIntToInt32(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpLeq32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less32 (Const32 <t> [-1]) x)
	// result: (Leq32 (Const32 <t> [0]) x)
	for {
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		if auxIntToInt32(v_0.AuxInt) != -1 {
			break
		}
		x := v_1
		v.reset(OpLeq32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Less32 _ (Const32 [math.MinInt32]))
	// result: (ConstBool [false])
	for {
		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != math.MinInt32 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Less32 (Const32 [math.MaxInt32]) _)
	// result: (ConstBool [false])
	for {
		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != math.MaxInt32 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Less32 x (Const32 <t> [math.MinInt32+1]))
	// result: (Eq32 x (Const32 <t> [math.MinInt32]))
	for {
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		if auxIntToInt32(v_1.AuxInt) != math.MinInt32+1 {
			break
		}
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(math.MinInt32)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less32 (Const32 <t> [math.MaxInt32-1]) x)
	// result: (Eq32 x (Const32 <t> [math.MaxInt32]))
	for {
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		if auxIntToInt32(v_0.AuxInt) != math.MaxInt32-1 {
			break
		}
		x := v_1
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(math.MaxInt32)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
12677func rewriteValuegeneric_OpLess32F(v *Value) bool {
12678	v_1 := v.Args[1]
12679	v_0 := v.Args[0]
12680	// match: (Less32F (Const32F [c]) (Const32F [d]))
12681	// result: (ConstBool [c < d])
12682	for {
12683		if v_0.Op != OpConst32F {
12684			break
12685		}
12686		c := auxIntToFloat32(v_0.AuxInt)
12687		if v_1.Op != OpConst32F {
12688			break
12689		}
12690		d := auxIntToFloat32(v_1.AuxInt)
12691		v.reset(OpConstBool)
12692		v.AuxInt = boolToAuxInt(c < d)
12693		return true
12694	}
12695	return false
12696}
// rewriteValuegeneric_OpLess32U rewrites unsigned 32-bit < comparisons.
// Each "for" below is one rule; rules are tried in source order and the
// first match rewrites v in place and returns true. Returns false when
// no rule applies.
func rewriteValuegeneric_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less32U (Const32 [c]) (Const32 [d]))
	// result: (ConstBool [uint32(c) < uint32(d)])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		if v_1.Op != OpConst32 {
			break
		}
		d := auxIntToInt32(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(uint32(c) < uint32(d))
		return true
	}
	// match: (Less32U x (Const32 <t> [1]))
	// result: (Eq32 (Const32 <t> [0]) x)
	for {
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		if auxIntToInt32(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Less32U _ (Const32 [0]))
	// result: (ConstBool [false])
	for {
		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Less32U (Const32 [-1]) _)
	// result: (ConstBool [false])
	// AuxInt -1 is all ones, i.e. math.MaxUint32 when read unsigned.
	for {
		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Less32U x (Const32 <t> [1]))
	// result: (Eq32 x (Const32 <t> [0]))
	// NOTE(review): same pattern as the second rule above, which always
	// matches first; kept to mirror the generator's rule list.
	for {
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		if auxIntToInt32(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less32U (Const32 <t> [-2]) x)
	// result: (Eq32 x (Const32 <t> [-1]))
	// Unsigned: MaxUint32-1 < x holds only when x == MaxUint32.
	for {
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		if auxIntToInt32(v_0.AuxInt) != -2 {
			break
		}
		x := v_1
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(-1)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLess64 rewrites signed 64-bit < comparisons.
// Each "for" below is one rule; rules are tried in source order and the
// first match rewrites v in place and returns true. Returns false when
// no rule applies.
func rewriteValuegeneric_OpLess64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less64 (Const64 [c]) (Const64 [d]))
	// result: (ConstBool [c < d])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(c < d)
		return true
	}
	// match: (Less64 (Const64 <t> [0]) x)
	// cond: isNonNegative(x)
	// result: (Neq64 (Const64 <t> [0]) x)
	for {
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		if auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		if !(isNonNegative(x)) {
			break
		}
		v.reset(OpNeq64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Less64 x (Const64 <t> [1]))
	// cond: isNonNegative(x)
	// result: (Eq64 (Const64 <t> [0]) x)
	for {
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		if auxIntToInt64(v_1.AuxInt) != 1 || !(isNonNegative(x)) {
			break
		}
		v.reset(OpEq64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Less64 x (Const64 <t> [1]))
	// result: (Leq64 x (Const64 <t> [0]))
	for {
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		if auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpLeq64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less64 (Const64 <t> [-1]) x)
	// result: (Leq64 (Const64 <t> [0]) x)
	for {
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		if auxIntToInt64(v_0.AuxInt) != -1 {
			break
		}
		x := v_1
		v.reset(OpLeq64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Less64 _ (Const64 [math.MinInt64]))
	// result: (ConstBool [false])
	for {
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != math.MinInt64 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Less64 (Const64 [math.MaxInt64]) _)
	// result: (ConstBool [false])
	for {
		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != math.MaxInt64 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Less64 x (Const64 <t> [math.MinInt64+1]))
	// result: (Eq64 x (Const64 <t> [math.MinInt64]))
	for {
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		if auxIntToInt64(v_1.AuxInt) != math.MinInt64+1 {
			break
		}
		v.reset(OpEq64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(math.MinInt64)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less64 (Const64 <t> [math.MaxInt64-1]) x)
	// result: (Eq64 x (Const64 <t> [math.MaxInt64]))
	for {
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		if auxIntToInt64(v_0.AuxInt) != math.MaxInt64-1 {
			break
		}
		x := v_1
		v.reset(OpEq64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(math.MaxInt64)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
12937func rewriteValuegeneric_OpLess64F(v *Value) bool {
12938	v_1 := v.Args[1]
12939	v_0 := v.Args[0]
12940	// match: (Less64F (Const64F [c]) (Const64F [d]))
12941	// result: (ConstBool [c < d])
12942	for {
12943		if v_0.Op != OpConst64F {
12944			break
12945		}
12946		c := auxIntToFloat64(v_0.AuxInt)
12947		if v_1.Op != OpConst64F {
12948			break
12949		}
12950		d := auxIntToFloat64(v_1.AuxInt)
12951		v.reset(OpConstBool)
12952		v.AuxInt = boolToAuxInt(c < d)
12953		return true
12954	}
12955	return false
12956}
// rewriteValuegeneric_OpLess64U rewrites unsigned 64-bit < comparisons.
// Each "for" below is one rule; rules are tried in source order and the
// first match rewrites v in place and returns true. Returns false when
// no rule applies.
func rewriteValuegeneric_OpLess64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less64U (Const64 [c]) (Const64 [d]))
	// result: (ConstBool [uint64(c) < uint64(d)])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(uint64(c) < uint64(d))
		return true
	}
	// match: (Less64U x (Const64 <t> [1]))
	// result: (Eq64 (Const64 <t> [0]) x)
	for {
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		if auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpEq64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Less64U _ (Const64 [0]))
	// result: (ConstBool [false])
	for {
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Less64U (Const64 [-1]) _)
	// result: (ConstBool [false])
	// AuxInt -1 is all ones, i.e. math.MaxUint64 when read unsigned.
	for {
		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Less64U x (Const64 <t> [1]))
	// result: (Eq64 x (Const64 <t> [0]))
	// NOTE(review): same pattern as the second rule above, which always
	// matches first; kept to mirror the generator's rule list.
	for {
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		if auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpEq64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less64U (Const64 <t> [-2]) x)
	// result: (Eq64 x (Const64 <t> [-1]))
	// Unsigned: MaxUint64-1 < x holds only when x == MaxUint64.
	for {
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		if auxIntToInt64(v_0.AuxInt) != -2 {
			break
		}
		x := v_1
		v.reset(OpEq64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(-1)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLess8 rewrites signed 8-bit < comparisons.
// Each "for" below is one rule; rules are tried in source order and the
// first match rewrites v in place and returns true. Returns false when
// no rule applies.
func rewriteValuegeneric_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less8 (Const8 [c]) (Const8 [d]))
	// result: (ConstBool [c < d])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		if v_1.Op != OpConst8 {
			break
		}
		d := auxIntToInt8(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(c < d)
		return true
	}
	// match: (Less8 (Const8 <t> [0]) x)
	// cond: isNonNegative(x)
	// result: (Neq8 (Const8 <t> [0]) x)
	for {
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		if auxIntToInt8(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		if !(isNonNegative(x)) {
			break
		}
		v.reset(OpNeq8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Less8 x (Const8 <t> [1]))
	// cond: isNonNegative(x)
	// result: (Eq8 (Const8 <t> [0]) x)
	for {
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		if auxIntToInt8(v_1.AuxInt) != 1 || !(isNonNegative(x)) {
			break
		}
		v.reset(OpEq8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Less8 x (Const8 <t> [1]))
	// result: (Leq8 x (Const8 <t> [0]))
	for {
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		if auxIntToInt8(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpLeq8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less8 (Const8 <t> [-1]) x)
	// result: (Leq8 (Const8 <t> [0]) x)
	for {
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		if auxIntToInt8(v_0.AuxInt) != -1 {
			break
		}
		x := v_1
		v.reset(OpLeq8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Less8 _ (Const8 [math.MinInt8 ]))
	// result: (ConstBool [false])
	for {
		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != math.MinInt8 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Less8 (Const8 [math.MaxInt8 ]) _)
	// result: (ConstBool [false])
	for {
		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != math.MaxInt8 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Less8 x (Const8 <t> [math.MinInt8 +1]))
	// result: (Eq8 x (Const8 <t> [math.MinInt8 ]))
	for {
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		if auxIntToInt8(v_1.AuxInt) != math.MinInt8+1 {
			break
		}
		v.reset(OpEq8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(math.MinInt8)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less8 (Const8 <t> [math.MaxInt8 -1]) x)
	// result: (Eq8 x (Const8 <t> [math.MaxInt8 ]))
	for {
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		if auxIntToInt8(v_0.AuxInt) != math.MaxInt8-1 {
			break
		}
		x := v_1
		v.reset(OpEq8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(math.MaxInt8)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLess8U applies the generic rewrite rules for
// (Less8U x y), unsigned 8-bit less-than. Rules are tried in source order
// and the first one that matches wins; the function reports whether v was
// rewritten. Note that Const8 aux values are stored as signed int8, so the
// unsigned boundary constants appear here as -1 (0xff) and -2 (0xfe).
//
// This file is generated from _gen/generic.rules; any rule change must be
// made there and regenerated, not edited here.
func rewriteValuegeneric_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// Constant folding: both operands known, compare as unsigned.
	// match: (Less8U (Const8 [c]) (Const8 [d]))
	// result: (ConstBool [ uint8(c) < uint8(d)])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		if v_1.Op != OpConst8 {
			break
		}
		d := auxIntToInt8(v_1.AuxInt)
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(uint8(c) < uint8(d))
		return true
	}
	// x <u 1 is only true for x == 0.
	// match: (Less8U x (Const8 <t> [1]))
	// result: (Eq8 (Const8 <t> [0]) x)
	for {
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		if auxIntToInt8(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpEq8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// Nothing is unsigned-less-than zero.
	// match: (Less8U _ (Const8 [0]))
	// result: (ConstBool [false])
	for {
		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// -1 is 0xff, the unsigned maximum; nothing exceeds it.
	// match: (Less8U (Const8 [-1]) _)
	// result: (ConstBool [false])
	for {
		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// NOTE(review): this rule appears unreachable — the earlier
	// (Less8U x (Const8 <t> [1])) rule above matches the same pattern with
	// no extra condition and always fires first. Likely a duplicate in
	// _gen/generic.rules; confirm there before removing.
	// match: (Less8U x (Const8 <t> [1]))
	// result: (Eq8 x (Const8 <t> [0]))
	for {
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		if auxIntToInt8(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpEq8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// 0xfe <u x is only true for x == 0xff.
	// match: (Less8U (Const8 <t> [-2]) x)
	// result: (Eq8 x (Const8 <t> [-1]))
	for {
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		if auxIntToInt8(v_0.AuxInt) != -2 {
			break
		}
		x := v_1
		v.reset(OpEq8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(-1)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLoad applies the generic rewrite rules for
// (Load <t> ptr mem). The rule families, tried in order, are:
//
//  1. Store-to-load forwarding: a Load that reads exactly what the most
//     recent Store to the same address wrote (possibly looking through up
//     to three intervening stores proven disjoint) becomes the stored value.
//  2. Constant bit reinterpretation: loading stored integer bits as a float
//     (or vice versa) folds to the reinterpreted constant.
//  3. Loads from Zero'd memory, through chains of disjoint stores, are
//     rebuilt as loads directly from the Zero's memory state (in the
//     Zero's block), or folded to zero constants when fully covered.
//  4. SSA decomposition: loads of small SSA-able structs/arrays become
//     StructMakeN/ArrayMakeN of per-field loads.
//  5. Fixed-symbol loads: loads of known read-only symbol contents fold to
//     Addr or Const32 values.
//
// Rule order matters; the first match wins. This file is generated from
// _gen/generic.rules — change the rules there, not here.
func rewriteValuegeneric_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// --- Family 1: store-to-load forwarding. Each deeper variant skips one
	// more intervening store, which must be disjoint from the loaded span. ---
	// match: (Load <t1> p1 (Store {t2} p2 x _))
	// cond: isSamePtr(p1, p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size()
	// result: x
	for {
		t1 := v.Type
		p1 := v_0
		if v_1.Op != OpStore {
			break
		}
		t2 := auxToType(v_1.Aux)
		x := v_1.Args[1]
		p2 := v_1.Args[0]
		if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size()) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 x _)))
	// cond: isSamePtr(p1, p3) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p3, t3.Size(), p2, t2.Size())
	// result: x
	for {
		t1 := v.Type
		p1 := v_0
		if v_1.Op != OpStore {
			break
		}
		t2 := auxToType(v_1.Aux)
		_ = v_1.Args[2]
		p2 := v_1.Args[0]
		v_1_2 := v_1.Args[2]
		if v_1_2.Op != OpStore {
			break
		}
		t3 := auxToType(v_1_2.Aux)
		x := v_1_2.Args[1]
		p3 := v_1_2.Args[0]
		if !(isSamePtr(p1, p3) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p3, t3.Size(), p2, t2.Size())) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 x _))))
	// cond: isSamePtr(p1, p4) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p4, t4.Size(), p2, t2.Size()) && disjoint(p4, t4.Size(), p3, t3.Size())
	// result: x
	for {
		t1 := v.Type
		p1 := v_0
		if v_1.Op != OpStore {
			break
		}
		t2 := auxToType(v_1.Aux)
		_ = v_1.Args[2]
		p2 := v_1.Args[0]
		v_1_2 := v_1.Args[2]
		if v_1_2.Op != OpStore {
			break
		}
		t3 := auxToType(v_1_2.Aux)
		_ = v_1_2.Args[2]
		p3 := v_1_2.Args[0]
		v_1_2_2 := v_1_2.Args[2]
		if v_1_2_2.Op != OpStore {
			break
		}
		t4 := auxToType(v_1_2_2.Aux)
		x := v_1_2_2.Args[1]
		p4 := v_1_2_2.Args[0]
		if !(isSamePtr(p1, p4) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p4, t4.Size(), p2, t2.Size()) && disjoint(p4, t4.Size(), p3, t3.Size())) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 x _)))))
	// cond: isSamePtr(p1, p5) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p5, t5.Size(), p2, t2.Size()) && disjoint(p5, t5.Size(), p3, t3.Size()) && disjoint(p5, t5.Size(), p4, t4.Size())
	// result: x
	for {
		t1 := v.Type
		p1 := v_0
		if v_1.Op != OpStore {
			break
		}
		t2 := auxToType(v_1.Aux)
		_ = v_1.Args[2]
		p2 := v_1.Args[0]
		v_1_2 := v_1.Args[2]
		if v_1_2.Op != OpStore {
			break
		}
		t3 := auxToType(v_1_2.Aux)
		_ = v_1_2.Args[2]
		p3 := v_1_2.Args[0]
		v_1_2_2 := v_1_2.Args[2]
		if v_1_2_2.Op != OpStore {
			break
		}
		t4 := auxToType(v_1_2_2.Aux)
		_ = v_1_2_2.Args[2]
		p4 := v_1_2_2.Args[0]
		v_1_2_2_2 := v_1_2_2.Args[2]
		if v_1_2_2_2.Op != OpStore {
			break
		}
		t5 := auxToType(v_1_2_2_2.Aux)
		x := v_1_2_2_2.Args[1]
		p5 := v_1_2_2_2.Args[0]
		if !(isSamePtr(p1, p5) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p5, t5.Size(), p2, t2.Size()) && disjoint(p5, t5.Size(), p3, t3.Size()) && disjoint(p5, t5.Size(), p4, t4.Size())) {
			break
		}
		v.copyOf(x)
		return true
	}
	// --- Family 2: reinterpret stored constant bits across int/float.
	// NaN payloads are excluded when producing float constants because a
	// float const's exact bit pattern is not guaranteed through the AuxInt. ---
	// match: (Load <t1> p1 (Store {t2} p2 (Const64 [x]) _))
	// cond: isSamePtr(p1,p2) && sizeof(t2) == 8 && is64BitFloat(t1) && !math.IsNaN(math.Float64frombits(uint64(x)))
	// result: (Const64F [math.Float64frombits(uint64(x))])
	for {
		t1 := v.Type
		p1 := v_0
		if v_1.Op != OpStore {
			break
		}
		t2 := auxToType(v_1.Aux)
		_ = v_1.Args[1]
		p2 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		x := auxIntToInt64(v_1_1.AuxInt)
		if !(isSamePtr(p1, p2) && sizeof(t2) == 8 && is64BitFloat(t1) && !math.IsNaN(math.Float64frombits(uint64(x)))) {
			break
		}
		v.reset(OpConst64F)
		v.AuxInt = float64ToAuxInt(math.Float64frombits(uint64(x)))
		return true
	}
	// match: (Load <t1> p1 (Store {t2} p2 (Const32 [x]) _))
	// cond: isSamePtr(p1,p2) && sizeof(t2) == 4 && is32BitFloat(t1) && !math.IsNaN(float64(math.Float32frombits(uint32(x))))
	// result: (Const32F [math.Float32frombits(uint32(x))])
	for {
		t1 := v.Type
		p1 := v_0
		if v_1.Op != OpStore {
			break
		}
		t2 := auxToType(v_1.Aux)
		_ = v_1.Args[1]
		p2 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst32 {
			break
		}
		x := auxIntToInt32(v_1_1.AuxInt)
		if !(isSamePtr(p1, p2) && sizeof(t2) == 4 && is32BitFloat(t1) && !math.IsNaN(float64(math.Float32frombits(uint32(x))))) {
			break
		}
		v.reset(OpConst32F)
		v.AuxInt = float32ToAuxInt(math.Float32frombits(uint32(x)))
		return true
	}
	// match: (Load <t1> p1 (Store {t2} p2 (Const64F [x]) _))
	// cond: isSamePtr(p1,p2) && sizeof(t2) == 8 && is64BitInt(t1)
	// result: (Const64 [int64(math.Float64bits(x))])
	for {
		t1 := v.Type
		p1 := v_0
		if v_1.Op != OpStore {
			break
		}
		t2 := auxToType(v_1.Aux)
		_ = v_1.Args[1]
		p2 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64F {
			break
		}
		x := auxIntToFloat64(v_1_1.AuxInt)
		if !(isSamePtr(p1, p2) && sizeof(t2) == 8 && is64BitInt(t1)) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(math.Float64bits(x)))
		return true
	}
	// match: (Load <t1> p1 (Store {t2} p2 (Const32F [x]) _))
	// cond: isSamePtr(p1,p2) && sizeof(t2) == 4 && is32BitInt(t1)
	// result: (Const32 [int32(math.Float32bits(x))])
	for {
		t1 := v.Type
		p1 := v_0
		if v_1.Op != OpStore {
			break
		}
		t2 := auxToType(v_1.Aux)
		_ = v_1.Args[1]
		p2 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst32F {
			break
		}
		x := auxIntToFloat32(v_1_1.AuxInt)
		if !(isSamePtr(p1, p2) && sizeof(t2) == 4 && is32BitInt(t1)) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(math.Float32bits(x)))
		return true
	}
	// --- Family 3a: load from Zero'd memory through disjoint stores. The
	// replacement Load is built in the Zero's block (@mem.Block) so it reads
	// the pre-store memory state. ---
	// match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ mem:(Zero [n] p3 _)))
	// cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p3) && CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size())
	// result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p3) mem)
	for {
		t1 := v.Type
		op := v_0
		if op.Op != OpOffPtr {
			break
		}
		o1 := auxIntToInt64(op.AuxInt)
		p1 := op.Args[0]
		if v_1.Op != OpStore {
			break
		}
		t2 := auxToType(v_1.Aux)
		_ = v_1.Args[2]
		p2 := v_1.Args[0]
		mem := v_1.Args[2]
		if mem.Op != OpZero {
			break
		}
		n := auxIntToInt64(mem.AuxInt)
		p3 := mem.Args[0]
		if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p3) && CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size())) {
			break
		}
		b = mem.Block
		v0 := b.NewValue0(v.Pos, OpLoad, t1)
		v.copyOf(v0)
		v1 := b.NewValue0(v.Pos, OpOffPtr, op.Type)
		v1.AuxInt = int64ToAuxInt(o1)
		v1.AddArg(p3)
		v0.AddArg2(v1, mem)
		return true
	}
	// match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ (Store {t3} p3 _ mem:(Zero [n] p4 _))))
	// cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p4) && CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size())
	// result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p4) mem)
	for {
		t1 := v.Type
		op := v_0
		if op.Op != OpOffPtr {
			break
		}
		o1 := auxIntToInt64(op.AuxInt)
		p1 := op.Args[0]
		if v_1.Op != OpStore {
			break
		}
		t2 := auxToType(v_1.Aux)
		_ = v_1.Args[2]
		p2 := v_1.Args[0]
		v_1_2 := v_1.Args[2]
		if v_1_2.Op != OpStore {
			break
		}
		t3 := auxToType(v_1_2.Aux)
		_ = v_1_2.Args[2]
		p3 := v_1_2.Args[0]
		mem := v_1_2.Args[2]
		if mem.Op != OpZero {
			break
		}
		n := auxIntToInt64(mem.AuxInt)
		p4 := mem.Args[0]
		if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p4) && CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size())) {
			break
		}
		b = mem.Block
		v0 := b.NewValue0(v.Pos, OpLoad, t1)
		v.copyOf(v0)
		v1 := b.NewValue0(v.Pos, OpOffPtr, op.Type)
		v1.AuxInt = int64ToAuxInt(o1)
		v1.AddArg(p4)
		v0.AddArg2(v1, mem)
		return true
	}
	// match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ mem:(Zero [n] p5 _)))))
	// cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p5) && CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size())
	// result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p5) mem)
	for {
		t1 := v.Type
		op := v_0
		if op.Op != OpOffPtr {
			break
		}
		o1 := auxIntToInt64(op.AuxInt)
		p1 := op.Args[0]
		if v_1.Op != OpStore {
			break
		}
		t2 := auxToType(v_1.Aux)
		_ = v_1.Args[2]
		p2 := v_1.Args[0]
		v_1_2 := v_1.Args[2]
		if v_1_2.Op != OpStore {
			break
		}
		t3 := auxToType(v_1_2.Aux)
		_ = v_1_2.Args[2]
		p3 := v_1_2.Args[0]
		v_1_2_2 := v_1_2.Args[2]
		if v_1_2_2.Op != OpStore {
			break
		}
		t4 := auxToType(v_1_2_2.Aux)
		_ = v_1_2_2.Args[2]
		p4 := v_1_2_2.Args[0]
		mem := v_1_2_2.Args[2]
		if mem.Op != OpZero {
			break
		}
		n := auxIntToInt64(mem.AuxInt)
		p5 := mem.Args[0]
		if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p5) && CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size())) {
			break
		}
		b = mem.Block
		v0 := b.NewValue0(v.Pos, OpLoad, t1)
		v.copyOf(v0)
		v1 := b.NewValue0(v.Pos, OpOffPtr, op.Type)
		v1.AuxInt = int64ToAuxInt(o1)
		v1.AddArg(p5)
		v0.AddArg2(v1, mem)
		return true
	}
	// match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 _ mem:(Zero [n] p6 _))))))
	// cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p6) && CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size()) && disjoint(op, t1.Size(), p5, t5.Size())
	// result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p6) mem)
	for {
		t1 := v.Type
		op := v_0
		if op.Op != OpOffPtr {
			break
		}
		o1 := auxIntToInt64(op.AuxInt)
		p1 := op.Args[0]
		if v_1.Op != OpStore {
			break
		}
		t2 := auxToType(v_1.Aux)
		_ = v_1.Args[2]
		p2 := v_1.Args[0]
		v_1_2 := v_1.Args[2]
		if v_1_2.Op != OpStore {
			break
		}
		t3 := auxToType(v_1_2.Aux)
		_ = v_1_2.Args[2]
		p3 := v_1_2.Args[0]
		v_1_2_2 := v_1_2.Args[2]
		if v_1_2_2.Op != OpStore {
			break
		}
		t4 := auxToType(v_1_2_2.Aux)
		_ = v_1_2_2.Args[2]
		p4 := v_1_2_2.Args[0]
		v_1_2_2_2 := v_1_2_2.Args[2]
		if v_1_2_2_2.Op != OpStore {
			break
		}
		t5 := auxToType(v_1_2_2_2.Aux)
		_ = v_1_2_2_2.Args[2]
		p5 := v_1_2_2_2.Args[0]
		mem := v_1_2_2_2.Args[2]
		if mem.Op != OpZero {
			break
		}
		n := auxIntToInt64(mem.AuxInt)
		p6 := mem.Args[0]
		if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p6) && CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size()) && disjoint(op, t1.Size(), p5, t5.Size())) {
			break
		}
		b = mem.Block
		v0 := b.NewValue0(v.Pos, OpLoad, t1)
		v.copyOf(v0)
		v1 := b.NewValue0(v.Pos, OpOffPtr, op.Type)
		v1.AuxInt = int64ToAuxInt(o1)
		v1.AddArg(p6)
		v0.AddArg2(v1, mem)
		return true
	}
	// --- Family 3b: the loaded span lies entirely inside a Zero, so the
	// result is the zero value of the loaded type. ---
	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
	// cond: t1.IsBoolean() && isSamePtr(p1, p2) && n >= o + 1
	// result: (ConstBool [false])
	for {
		t1 := v.Type
		if v_0.Op != OpOffPtr {
			break
		}
		o := auxIntToInt64(v_0.AuxInt)
		p1 := v_0.Args[0]
		if v_1.Op != OpZero {
			break
		}
		n := auxIntToInt64(v_1.AuxInt)
		p2 := v_1.Args[0]
		if !(t1.IsBoolean() && isSamePtr(p1, p2) && n >= o+1) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
	// cond: is8BitInt(t1) && isSamePtr(p1, p2) && n >= o + 1
	// result: (Const8 [0])
	for {
		t1 := v.Type
		if v_0.Op != OpOffPtr {
			break
		}
		o := auxIntToInt64(v_0.AuxInt)
		p1 := v_0.Args[0]
		if v_1.Op != OpZero {
			break
		}
		n := auxIntToInt64(v_1.AuxInt)
		p2 := v_1.Args[0]
		if !(is8BitInt(t1) && isSamePtr(p1, p2) && n >= o+1) {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(0)
		return true
	}
	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
	// cond: is16BitInt(t1) && isSamePtr(p1, p2) && n >= o + 2
	// result: (Const16 [0])
	for {
		t1 := v.Type
		if v_0.Op != OpOffPtr {
			break
		}
		o := auxIntToInt64(v_0.AuxInt)
		p1 := v_0.Args[0]
		if v_1.Op != OpZero {
			break
		}
		n := auxIntToInt64(v_1.AuxInt)
		p2 := v_1.Args[0]
		if !(is16BitInt(t1) && isSamePtr(p1, p2) && n >= o+2) {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
	// cond: is32BitInt(t1) && isSamePtr(p1, p2) && n >= o + 4
	// result: (Const32 [0])
	for {
		t1 := v.Type
		if v_0.Op != OpOffPtr {
			break
		}
		o := auxIntToInt64(v_0.AuxInt)
		p1 := v_0.Args[0]
		if v_1.Op != OpZero {
			break
		}
		n := auxIntToInt64(v_1.AuxInt)
		p2 := v_1.Args[0]
		if !(is32BitInt(t1) && isSamePtr(p1, p2) && n >= o+4) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(0)
		return true
	}
	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
	// cond: is64BitInt(t1) && isSamePtr(p1, p2) && n >= o + 8
	// result: (Const64 [0])
	for {
		t1 := v.Type
		if v_0.Op != OpOffPtr {
			break
		}
		o := auxIntToInt64(v_0.AuxInt)
		p1 := v_0.Args[0]
		if v_1.Op != OpZero {
			break
		}
		n := auxIntToInt64(v_1.AuxInt)
		p2 := v_1.Args[0]
		if !(is64BitInt(t1) && isSamePtr(p1, p2) && n >= o+8) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
	// cond: is32BitFloat(t1) && isSamePtr(p1, p2) && n >= o + 4
	// result: (Const32F [0])
	for {
		t1 := v.Type
		if v_0.Op != OpOffPtr {
			break
		}
		o := auxIntToInt64(v_0.AuxInt)
		p1 := v_0.Args[0]
		if v_1.Op != OpZero {
			break
		}
		n := auxIntToInt64(v_1.AuxInt)
		p2 := v_1.Args[0]
		if !(is32BitFloat(t1) && isSamePtr(p1, p2) && n >= o+4) {
			break
		}
		v.reset(OpConst32F)
		v.AuxInt = float32ToAuxInt(0)
		return true
	}
	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
	// cond: is64BitFloat(t1) && isSamePtr(p1, p2) && n >= o + 8
	// result: (Const64F [0])
	for {
		t1 := v.Type
		if v_0.Op != OpOffPtr {
			break
		}
		o := auxIntToInt64(v_0.AuxInt)
		p1 := v_0.Args[0]
		if v_1.Op != OpZero {
			break
		}
		n := auxIntToInt64(v_1.AuxInt)
		p2 := v_1.Args[0]
		if !(is64BitFloat(t1) && isSamePtr(p1, p2) && n >= o+8) {
			break
		}
		v.reset(OpConst64F)
		v.AuxInt = float64ToAuxInt(0)
		return true
	}
	// --- Family 4: decompose SSA-able struct loads into per-field loads
	// (StructMake0..StructMake4), and tiny array loads into ArrayMake0/1. ---
	// match: (Load <t> _ _)
	// cond: t.IsStruct() && t.NumFields() == 0 && CanSSA(t)
	// result: (StructMake0)
	for {
		t := v.Type
		if !(t.IsStruct() && t.NumFields() == 0 && CanSSA(t)) {
			break
		}
		v.reset(OpStructMake0)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 1 && CanSSA(t)
	// result: (StructMake1 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem))
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsStruct() && t.NumFields() == 1 && CanSSA(t)) {
			break
		}
		v.reset(OpStructMake1)
		v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
		v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
		v1.AuxInt = int64ToAuxInt(0)
		v1.AddArg(ptr)
		v0.AddArg2(v1, mem)
		v.AddArg(v0)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 2 && CanSSA(t)
	// result: (StructMake2 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem))
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsStruct() && t.NumFields() == 2 && CanSSA(t)) {
			break
		}
		v.reset(OpStructMake2)
		v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
		v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
		v1.AuxInt = int64ToAuxInt(0)
		v1.AddArg(ptr)
		v0.AddArg2(v1, mem)
		v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
		v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
		v3.AuxInt = int64ToAuxInt(t.FieldOff(1))
		v3.AddArg(ptr)
		v2.AddArg2(v3, mem)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 3 && CanSSA(t)
	// result: (StructMake3 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem))
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsStruct() && t.NumFields() == 3 && CanSSA(t)) {
			break
		}
		v.reset(OpStructMake3)
		v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
		v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
		v1.AuxInt = int64ToAuxInt(0)
		v1.AddArg(ptr)
		v0.AddArg2(v1, mem)
		v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
		v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
		v3.AuxInt = int64ToAuxInt(t.FieldOff(1))
		v3.AddArg(ptr)
		v2.AddArg2(v3, mem)
		v4 := b.NewValue0(v.Pos, OpLoad, t.FieldType(2))
		v5 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
		v5.AuxInt = int64ToAuxInt(t.FieldOff(2))
		v5.AddArg(ptr)
		v4.AddArg2(v5, mem)
		v.AddArg3(v0, v2, v4)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 4 && CanSSA(t)
	// result: (StructMake4 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem) (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem))
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsStruct() && t.NumFields() == 4 && CanSSA(t)) {
			break
		}
		v.reset(OpStructMake4)
		v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
		v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
		v1.AuxInt = int64ToAuxInt(0)
		v1.AddArg(ptr)
		v0.AddArg2(v1, mem)
		v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
		v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
		v3.AuxInt = int64ToAuxInt(t.FieldOff(1))
		v3.AddArg(ptr)
		v2.AddArg2(v3, mem)
		v4 := b.NewValue0(v.Pos, OpLoad, t.FieldType(2))
		v5 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
		v5.AuxInt = int64ToAuxInt(t.FieldOff(2))
		v5.AddArg(ptr)
		v4.AddArg2(v5, mem)
		v6 := b.NewValue0(v.Pos, OpLoad, t.FieldType(3))
		v7 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(3).PtrTo())
		v7.AuxInt = int64ToAuxInt(t.FieldOff(3))
		v7.AddArg(ptr)
		v6.AddArg2(v7, mem)
		v.AddArg4(v0, v2, v4, v6)
		return true
	}
	// match: (Load <t> _ _)
	// cond: t.IsArray() && t.NumElem() == 0
	// result: (ArrayMake0)
	for {
		t := v.Type
		if !(t.IsArray() && t.NumElem() == 0) {
			break
		}
		v.reset(OpArrayMake0)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsArray() && t.NumElem() == 1 && CanSSA(t)
	// result: (ArrayMake1 (Load <t.Elem()> ptr mem))
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsArray() && t.NumElem() == 1 && CanSSA(t)) {
			break
		}
		v.reset(OpArrayMake1)
		v0 := b.NewValue0(v.Pos, OpLoad, t.Elem())
		v0.AddArg2(ptr, mem)
		v.AddArg(v0)
		return true
	}
	// --- Family 5: loads of known contents of fixed (read-only) symbols,
	// looking through Convert and ITab(IMake(...)) wrappers. ---
	// match: (Load <t> (OffPtr [off] (Addr {s} sb) ) _)
	// cond: t.IsUintptr() && isFixedSym(s, off)
	// result: (Addr {fixedSym(b.Func, s, off)} sb)
	for {
		t := v.Type
		if v_0.Op != OpOffPtr {
			break
		}
		off := auxIntToInt64(v_0.AuxInt)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpAddr {
			break
		}
		s := auxToSym(v_0_0.Aux)
		sb := v_0_0.Args[0]
		if !(t.IsUintptr() && isFixedSym(s, off)) {
			break
		}
		v.reset(OpAddr)
		v.Aux = symToAux(fixedSym(b.Func, s, off))
		v.AddArg(sb)
		return true
	}
	// match: (Load <t> (OffPtr [off] (Convert (Addr {s} sb) _) ) _)
	// cond: t.IsUintptr() && isFixedSym(s, off)
	// result: (Addr {fixedSym(b.Func, s, off)} sb)
	for {
		t := v.Type
		if v_0.Op != OpOffPtr {
			break
		}
		off := auxIntToInt64(v_0.AuxInt)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConvert {
			break
		}
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpAddr {
			break
		}
		s := auxToSym(v_0_0_0.Aux)
		sb := v_0_0_0.Args[0]
		if !(t.IsUintptr() && isFixedSym(s, off)) {
			break
		}
		v.reset(OpAddr)
		v.Aux = symToAux(fixedSym(b.Func, s, off))
		v.AddArg(sb)
		return true
	}
	// match: (Load <t> (OffPtr [off] (ITab (IMake (Addr {s} sb) _))) _)
	// cond: t.IsUintptr() && isFixedSym(s, off)
	// result: (Addr {fixedSym(b.Func, s, off)} sb)
	for {
		t := v.Type
		if v_0.Op != OpOffPtr {
			break
		}
		off := auxIntToInt64(v_0.AuxInt)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpITab {
			break
		}
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpIMake {
			break
		}
		v_0_0_0_0 := v_0_0_0.Args[0]
		if v_0_0_0_0.Op != OpAddr {
			break
		}
		s := auxToSym(v_0_0_0_0.Aux)
		sb := v_0_0_0_0.Args[0]
		if !(t.IsUintptr() && isFixedSym(s, off)) {
			break
		}
		v.reset(OpAddr)
		v.Aux = symToAux(fixedSym(b.Func, s, off))
		v.AddArg(sb)
		return true
	}
	// match: (Load <t> (OffPtr [off] (ITab (IMake (Convert (Addr {s} sb) _) _))) _)
	// cond: t.IsUintptr() && isFixedSym(s, off)
	// result: (Addr {fixedSym(b.Func, s, off)} sb)
	for {
		t := v.Type
		if v_0.Op != OpOffPtr {
			break
		}
		off := auxIntToInt64(v_0.AuxInt)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpITab {
			break
		}
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpIMake {
			break
		}
		v_0_0_0_0 := v_0_0_0.Args[0]
		if v_0_0_0_0.Op != OpConvert {
			break
		}
		v_0_0_0_0_0 := v_0_0_0_0.Args[0]
		if v_0_0_0_0_0.Op != OpAddr {
			break
		}
		s := auxToSym(v_0_0_0_0_0.Aux)
		sb := v_0_0_0_0_0.Args[0]
		if !(t.IsUintptr() && isFixedSym(s, off)) {
			break
		}
		v.reset(OpAddr)
		v.Aux = symToAux(fixedSym(b.Func, s, off))
		v.AddArg(sb)
		return true
	}
	// match: (Load <t> (OffPtr [off] (Addr {sym} _) ) _)
	// cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
	// result: (Const32 [fixed32(config, sym, off)])
	for {
		t := v.Type
		if v_0.Op != OpOffPtr {
			break
		}
		off := auxIntToInt64(v_0.AuxInt)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpAddr {
			break
		}
		sym := auxToSym(v_0_0.Aux)
		if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
		return true
	}
	// match: (Load <t> (OffPtr [off] (Convert (Addr {sym} _) _) ) _)
	// cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
	// result: (Const32 [fixed32(config, sym, off)])
	for {
		t := v.Type
		if v_0.Op != OpOffPtr {
			break
		}
		off := auxIntToInt64(v_0.AuxInt)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConvert {
			break
		}
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpAddr {
			break
		}
		sym := auxToSym(v_0_0_0.Aux)
		if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
		return true
	}
	// match: (Load <t> (OffPtr [off] (ITab (IMake (Addr {sym} _) _))) _)
	// cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
	// result: (Const32 [fixed32(config, sym, off)])
	for {
		t := v.Type
		if v_0.Op != OpOffPtr {
			break
		}
		off := auxIntToInt64(v_0.AuxInt)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpITab {
			break
		}
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpIMake {
			break
		}
		v_0_0_0_0 := v_0_0_0.Args[0]
		if v_0_0_0_0.Op != OpAddr {
			break
		}
		sym := auxToSym(v_0_0_0_0.Aux)
		if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
		return true
	}
	// match: (Load <t> (OffPtr [off] (ITab (IMake (Convert (Addr {sym} _) _) _))) _)
	// cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
	// result: (Const32 [fixed32(config, sym, off)])
	for {
		t := v.Type
		if v_0.Op != OpOffPtr {
			break
		}
		off := auxIntToInt64(v_0.AuxInt)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpITab {
			break
		}
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpIMake {
			break
		}
		v_0_0_0_0 := v_0_0_0.Args[0]
		if v_0_0_0_0.Op != OpConvert {
			break
		}
		v_0_0_0_0_0 := v_0_0_0_0.Args[0]
		if v_0_0_0_0_0.Op != OpAddr {
			break
		}
		sym := auxToSym(v_0_0_0_0_0.Aux)
		if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh16x16 applies the generic rewrite rules for
// Lsh16x16 values; it reports whether v was rewritten in place.
// Each match/cond/result comment below documents one rule from _gen/generic.rules.
func rewriteValuegeneric_OpLsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh16x16 <t> x (Const16 [c]))
	// result: (Lsh16x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		// Normalize the constant shift amount to 64 bits (zero-extended)
		// so that later rules only need to handle the x64 variant.
		v.reset(OpLsh16x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh16x16 (Const16 [0]) _)
	// result: (Const16 [0])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh16x32 applies the generic rewrite rules for
// Lsh16x32 values; it reports whether v was rewritten in place.
// Each match/cond/result comment below documents one rule from _gen/generic.rules.
func rewriteValuegeneric_OpLsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh16x32 <t> x (Const32 [c]))
	// result: (Lsh16x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		// Normalize the constant shift amount to 64 bits (zero-extended)
		// so that later rules only need to handle the x64 variant.
		v.reset(OpLsh16x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh16x32 (Const16 [0]) _)
	// result: (Const16 [0])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh16x64 applies the generic rewrite rules for
// Lsh16x64 values; it reports whether v was rewritten in place.
// Rules handled: constant folding, shift-by-zero, zero operand,
// shift-past-width, merging nested shifts, and turning shift pairs into masks.
func rewriteValuegeneric_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x64 (Const16 [c]) (Const64 [d]))
	// result: (Const16 [c << uint64(d)])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		// Constant-fold: both operands are known constants.
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(c << uint64(d))
		return true
	}
	// match: (Lsh16x64 x (Const64 [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (Lsh16x64 (Const16 [0]) _)
	// result: (Const16 [0])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	// match: (Lsh16x64 _ (Const64 [c]))
	// cond: uint64(c) >= 16
	// result: (Const16 [0])
	for {
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 16) {
			break
		}
		// Shifting a 16-bit value left by 16 or more always yields 0.
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	// match: (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Lsh16x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		if v_0.Op != OpLsh16x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		// Combine nested left shifts only when the summed shift amount
		// does not overflow (uaddOvf guards the unsigned add c+d).
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpLsh16x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(c + d)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh16x64 i:(Rsh16x64 x (Const64 [c])) (Const64 [c]))
	// cond: c >= 0 && c < 16 && i.Uses == 1
	// result: (And16 x (Const16 <v.Type> [int16(-1) << c]))
	for {
		i := v_0
		if i.Op != OpRsh16x64 {
			break
		}
		_ = i.Args[1]
		x := i.Args[0]
		i_1 := i.Args[1]
		if i_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(i_1.AuxInt)
		// A right shift then a left shift by the same in-range amount just
		// clears the low c bits; rewrite to a mask. Only done when the
		// intermediate value has no other uses.
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 16 && i.Uses == 1) {
			break
		}
		v.reset(OpAnd16)
		v0 := b.NewValue0(v.Pos, OpConst16, v.Type)
		v0.AuxInt = int16ToAuxInt(int16(-1) << c)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh16x64 i:(Rsh16Ux64 x (Const64 [c])) (Const64 [c]))
	// cond: c >= 0 && c < 16 && i.Uses == 1
	// result: (And16 x (Const16 <v.Type> [int16(-1) << c]))
	for {
		i := v_0
		if i.Op != OpRsh16Ux64 {
			break
		}
		_ = i.Args[1]
		x := i.Args[0]
		i_1 := i.Args[1]
		if i_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(i_1.AuxInt)
		// Same mask rewrite as above, for the unsigned right shift.
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 16 && i.Uses == 1) {
			break
		}
		v.reset(OpAnd16)
		v0 := b.NewValue0(v.Pos, OpConst16, v.Type)
		v0.AuxInt = int16ToAuxInt(int16(-1) << c)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
	// result: (Lsh16x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
	for {
		if v_0.Op != OpRsh16Ux64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLsh16x64 {
			break
		}
		_ = v_0_0.Args[1]
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := auxIntToInt64(v_0_0_1.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		c3 := auxIntToInt64(v_1.AuxInt)
		// Collapse lsh/rsh/lsh chains to a single shift when the net
		// amount c1-c2+c3 is well-defined and does not overflow.
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
			break
		}
		v.reset(OpLsh16x64)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh16x8 applies the generic rewrite rules for
// Lsh16x8 values; it reports whether v was rewritten in place.
// Each match/cond/result comment below documents one rule from _gen/generic.rules.
func rewriteValuegeneric_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh16x8 <t> x (Const8 [c]))
	// result: (Lsh16x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		// Normalize the constant shift amount to 64 bits (zero-extended)
		// so that later rules only need to handle the x64 variant.
		v.reset(OpLsh16x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh16x8 (Const16 [0]) _)
	// result: (Const16 [0])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh32x16 applies the generic rewrite rules for
// Lsh32x16 values; it reports whether v was rewritten in place.
// Each match/cond/result comment below documents one rule from _gen/generic.rules.
func rewriteValuegeneric_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh32x16 <t> x (Const16 [c]))
	// result: (Lsh32x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		// Normalize the constant shift amount to 64 bits (zero-extended)
		// so that later rules only need to handle the x64 variant.
		v.reset(OpLsh32x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh32x16 (Const32 [0]) _)
	// result: (Const32 [0])
	for {
		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh32x32 applies the generic rewrite rules for
// Lsh32x32 values; it reports whether v was rewritten in place.
// Each match/cond/result comment below documents one rule from _gen/generic.rules.
func rewriteValuegeneric_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh32x32 <t> x (Const32 [c]))
	// result: (Lsh32x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		// Normalize the constant shift amount to 64 bits (zero-extended)
		// so that later rules only need to handle the x64 variant.
		v.reset(OpLsh32x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh32x32 (Const32 [0]) _)
	// result: (Const32 [0])
	for {
		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh32x64 applies the generic rewrite rules for
// Lsh32x64 values; it reports whether v was rewritten in place.
// Rules handled: constant folding, shift-by-zero, zero operand,
// shift-past-width, merging nested shifts, and turning shift pairs into masks.
func rewriteValuegeneric_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x64 (Const32 [c]) (Const64 [d]))
	// result: (Const32 [c << uint64(d)])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		// Constant-fold: both operands are known constants.
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(c << uint64(d))
		return true
	}
	// match: (Lsh32x64 x (Const64 [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (Lsh32x64 (Const32 [0]) _)
	// result: (Const32 [0])
	for {
		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(0)
		return true
	}
	// match: (Lsh32x64 _ (Const64 [c]))
	// cond: uint64(c) >= 32
	// result: (Const32 [0])
	for {
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 32) {
			break
		}
		// Shifting a 32-bit value left by 32 or more always yields 0.
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(0)
		return true
	}
	// match: (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Lsh32x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		if v_0.Op != OpLsh32x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		// Combine nested left shifts only when the summed shift amount
		// does not overflow (uaddOvf guards the unsigned add c+d).
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpLsh32x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(c + d)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh32x64 i:(Rsh32x64 x (Const64 [c])) (Const64 [c]))
	// cond: c >= 0 && c < 32 && i.Uses == 1
	// result: (And32 x (Const32 <v.Type> [int32(-1) << c]))
	for {
		i := v_0
		if i.Op != OpRsh32x64 {
			break
		}
		_ = i.Args[1]
		x := i.Args[0]
		i_1 := i.Args[1]
		if i_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(i_1.AuxInt)
		// A right shift then a left shift by the same in-range amount just
		// clears the low c bits; rewrite to a mask. Only done when the
		// intermediate value has no other uses.
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 32 && i.Uses == 1) {
			break
		}
		v.reset(OpAnd32)
		v0 := b.NewValue0(v.Pos, OpConst32, v.Type)
		v0.AuxInt = int32ToAuxInt(int32(-1) << c)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh32x64 i:(Rsh32Ux64 x (Const64 [c])) (Const64 [c]))
	// cond: c >= 0 && c < 32 && i.Uses == 1
	// result: (And32 x (Const32 <v.Type> [int32(-1) << c]))
	for {
		i := v_0
		if i.Op != OpRsh32Ux64 {
			break
		}
		_ = i.Args[1]
		x := i.Args[0]
		i_1 := i.Args[1]
		if i_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(i_1.AuxInt)
		// Same mask rewrite as above, for the unsigned right shift.
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 32 && i.Uses == 1) {
			break
		}
		v.reset(OpAnd32)
		v0 := b.NewValue0(v.Pos, OpConst32, v.Type)
		v0.AuxInt = int32ToAuxInt(int32(-1) << c)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
	// result: (Lsh32x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
	for {
		if v_0.Op != OpRsh32Ux64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLsh32x64 {
			break
		}
		_ = v_0_0.Args[1]
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := auxIntToInt64(v_0_0_1.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		c3 := auxIntToInt64(v_1.AuxInt)
		// Collapse lsh/rsh/lsh chains to a single shift when the net
		// amount c1-c2+c3 is well-defined and does not overflow.
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
			break
		}
		v.reset(OpLsh32x64)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh32x8 applies the generic rewrite rules for
// Lsh32x8 values; it reports whether v was rewritten in place.
// Each match/cond/result comment below documents one rule from _gen/generic.rules.
func rewriteValuegeneric_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh32x8 <t> x (Const8 [c]))
	// result: (Lsh32x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		// Normalize the constant shift amount to 64 bits (zero-extended)
		// so that later rules only need to handle the x64 variant.
		v.reset(OpLsh32x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh32x8 (Const32 [0]) _)
	// result: (Const32 [0])
	for {
		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh64x16 applies the generic rewrite rules for
// Lsh64x16 values; it reports whether v was rewritten in place.
// Each match/cond/result comment below documents one rule from _gen/generic.rules.
func rewriteValuegeneric_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh64x16 <t> x (Const16 [c]))
	// result: (Lsh64x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		// Normalize the constant shift amount to 64 bits (zero-extended)
		// so that later rules only need to handle the x64 variant.
		v.reset(OpLsh64x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh64x16 (Const64 [0]) _)
	// result: (Const64 [0])
	for {
		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh64x32 applies the generic rewrite rules for
// Lsh64x32 values; it reports whether v was rewritten in place.
// Each match/cond/result comment below documents one rule from _gen/generic.rules.
func rewriteValuegeneric_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh64x32 <t> x (Const32 [c]))
	// result: (Lsh64x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		// Normalize the constant shift amount to 64 bits (zero-extended)
		// so that later rules only need to handle the x64 variant.
		v.reset(OpLsh64x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh64x32 (Const64 [0]) _)
	// result: (Const64 [0])
	for {
		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh64x64 applies the generic rewrite rules for
// Lsh64x64 values; it reports whether v was rewritten in place.
// Rules handled: constant folding, shift-by-zero, zero operand,
// shift-past-width, merging nested shifts, and turning shift pairs into masks.
func rewriteValuegeneric_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x64 (Const64 [c]) (Const64 [d]))
	// result: (Const64 [c << uint64(d)])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		// Constant-fold: both operands are known constants.
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(c << uint64(d))
		return true
	}
	// match: (Lsh64x64 x (Const64 [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (Lsh64x64 (Const64 [0]) _)
	// result: (Const64 [0])
	for {
		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (Lsh64x64 _ (Const64 [c]))
	// cond: uint64(c) >= 64
	// result: (Const64 [0])
	for {
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		// Shifting a 64-bit value left by 64 or more always yields 0.
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Lsh64x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		if v_0.Op != OpLsh64x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		// Combine nested left shifts only when the summed shift amount
		// does not overflow (uaddOvf guards the unsigned add c+d).
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpLsh64x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(c + d)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh64x64 i:(Rsh64x64 x (Const64 [c])) (Const64 [c]))
	// cond: c >= 0 && c < 64 && i.Uses == 1
	// result: (And64 x (Const64 <v.Type> [int64(-1) << c]))
	for {
		i := v_0
		if i.Op != OpRsh64x64 {
			break
		}
		_ = i.Args[1]
		x := i.Args[0]
		i_1 := i.Args[1]
		if i_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(i_1.AuxInt)
		// A right shift then a left shift by the same in-range amount just
		// clears the low c bits; rewrite to a mask. Only done when the
		// intermediate value has no other uses.
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 64 && i.Uses == 1) {
			break
		}
		v.reset(OpAnd64)
		v0 := b.NewValue0(v.Pos, OpConst64, v.Type)
		v0.AuxInt = int64ToAuxInt(int64(-1) << c)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh64x64 i:(Rsh64Ux64 x (Const64 [c])) (Const64 [c]))
	// cond: c >= 0 && c < 64 && i.Uses == 1
	// result: (And64 x (Const64 <v.Type> [int64(-1) << c]))
	for {
		i := v_0
		if i.Op != OpRsh64Ux64 {
			break
		}
		_ = i.Args[1]
		x := i.Args[0]
		i_1 := i.Args[1]
		if i_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(i_1.AuxInt)
		// Same mask rewrite as above, for the unsigned right shift.
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 64 && i.Uses == 1) {
			break
		}
		v.reset(OpAnd64)
		v0 := b.NewValue0(v.Pos, OpConst64, v.Type)
		v0.AuxInt = int64ToAuxInt(int64(-1) << c)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
	// result: (Lsh64x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
	for {
		if v_0.Op != OpRsh64Ux64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLsh64x64 {
			break
		}
		_ = v_0_0.Args[1]
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := auxIntToInt64(v_0_0_1.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		c3 := auxIntToInt64(v_1.AuxInt)
		// Collapse lsh/rsh/lsh chains to a single shift when the net
		// amount c1-c2+c3 is well-defined and does not overflow.
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
			break
		}
		v.reset(OpLsh64x64)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh64x8 applies the generic rewrite rules for
// Lsh64x8 values; it reports whether v was rewritten in place.
// Each match/cond/result comment below documents one rule from _gen/generic.rules.
func rewriteValuegeneric_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh64x8 <t> x (Const8 [c]))
	// result: (Lsh64x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		// Normalize the constant shift amount to 64 bits (zero-extended)
		// so that later rules only need to handle the x64 variant.
		v.reset(OpLsh64x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh64x8 (Const64 [0]) _)
	// result: (Const64 [0])
	for {
		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh8x16 applies the generic rewrite rules for
// Lsh8x16 values; it reports whether v was rewritten in place.
// Each match/cond/result comment below documents one rule from _gen/generic.rules.
func rewriteValuegeneric_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh8x16 <t> x (Const16 [c]))
	// result: (Lsh8x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		// Normalize the constant shift amount to 64 bits (zero-extended)
		// so that later rules only need to handle the x64 variant.
		v.reset(OpLsh8x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh8x16 (Const8 [0]) _)
	// result: (Const8 [0])
	for {
		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh8x32 applies the generic rewrite rules for
// Lsh8x32 values; it reports whether v was rewritten in place.
// Each match/cond/result comment below documents one rule from _gen/generic.rules.
func rewriteValuegeneric_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh8x32 <t> x (Const32 [c]))
	// result: (Lsh8x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		// Normalize the constant shift amount to 64 bits (zero-extended)
		// so that later rules only need to handle the x64 variant.
		v.reset(OpLsh8x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh8x32 (Const8 [0]) _)
	// result: (Const8 [0])
	for {
		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh8x64 applies the generic rewrite rules for
// Lsh8x64 values; it reports whether v was rewritten in place.
// Rules handled: constant folding, shift-by-zero, zero operand,
// shift-past-width, merging nested shifts, and turning shift pairs into masks.
func rewriteValuegeneric_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x64 (Const8 [c]) (Const64 [d]))
	// result: (Const8 [c << uint64(d)])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		// Constant-fold: both operands are known constants.
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(c << uint64(d))
		return true
	}
	// match: (Lsh8x64 x (Const64 [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (Lsh8x64 (Const8 [0]) _)
	// result: (Const8 [0])
	for {
		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(0)
		return true
	}
	// match: (Lsh8x64 _ (Const64 [c]))
	// cond: uint64(c) >= 8
	// result: (Const8 [0])
	for {
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 8) {
			break
		}
		// Shifting an 8-bit value left by 8 or more always yields 0.
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(0)
		return true
	}
	// match: (Lsh8x64 <t> (Lsh8x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Lsh8x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		if v_0.Op != OpLsh8x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		// Combine nested left shifts only when the summed shift amount
		// does not overflow (uaddOvf guards the unsigned add c+d).
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpLsh8x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(c + d)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh8x64 i:(Rsh8x64 x (Const64 [c])) (Const64 [c]))
	// cond: c >= 0 && c < 8 && i.Uses == 1
	// result: (And8 x (Const8 <v.Type> [int8(-1) << c]))
	for {
		i := v_0
		if i.Op != OpRsh8x64 {
			break
		}
		_ = i.Args[1]
		x := i.Args[0]
		i_1 := i.Args[1]
		if i_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(i_1.AuxInt)
		// A right shift then a left shift by the same in-range amount just
		// clears the low c bits; rewrite to a mask. Only done when the
		// intermediate value has no other uses.
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 8 && i.Uses == 1) {
			break
		}
		v.reset(OpAnd8)
		v0 := b.NewValue0(v.Pos, OpConst8, v.Type)
		v0.AuxInt = int8ToAuxInt(int8(-1) << c)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh8x64 i:(Rsh8Ux64 x (Const64 [c])) (Const64 [c]))
	// cond: c >= 0 && c < 8 && i.Uses == 1
	// result: (And8 x (Const8 <v.Type> [int8(-1) << c]))
	for {
		i := v_0
		if i.Op != OpRsh8Ux64 {
			break
		}
		_ = i.Args[1]
		x := i.Args[0]
		i_1 := i.Args[1]
		if i_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(i_1.AuxInt)
		// Same mask rewrite as above, for the unsigned right shift.
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 8 && i.Uses == 1) {
			break
		}
		v.reset(OpAnd8)
		v0 := b.NewValue0(v.Pos, OpConst8, v.Type)
		v0.AuxInt = int8ToAuxInt(int8(-1) << c)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
	// result: (Lsh8x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
	for {
		if v_0.Op != OpRsh8Ux64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLsh8x64 {
			break
		}
		_ = v_0_0.Args[1]
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := auxIntToInt64(v_0_0_1.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		c3 := auxIntToInt64(v_1.AuxInt)
		// Collapse lsh/rsh/lsh chains to a single shift when the net
		// amount c1-c2+c3 is well-defined and does not overflow.
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
			break
		}
		v.reset(OpLsh8x64)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh8x8 applies the generic rewrite rules for
// Lsh8x8 values; it reports whether v was rewritten in place.
// Each match/cond/result comment below documents one rule from _gen/generic.rules.
func rewriteValuegeneric_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh8x8 <t> x (Const8 [c]))
	// result: (Lsh8x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		// Normalize the constant shift amount to 64 bits (zero-extended)
		// so that later rules only need to handle the x64 variant.
		v.reset(OpLsh8x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Lsh8x8 (Const8 [0]) _)
	// result: (Const8 [0])
	for {
		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpMod16 applies the generic rewrite rules for signed
// 16-bit modulus; it reports whether v was rewritten in place.
// Rules handled: constant folding, power-of-two masking, negative-divisor
// normalization, and lowering to x - (x/c)*c.
func rewriteValuegeneric_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Mod16 (Const16 [c]) (Const16 [d]))
	// cond: d != 0
	// result: (Const16 [c % d])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if v_1.Op != OpConst16 {
			break
		}
		d := auxIntToInt16(v_1.AuxInt)
		// Constant-fold, but never fold a division by zero.
		if !(d != 0) {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(c % d)
		return true
	}
	// match: (Mod16 <t> n (Const16 [c]))
	// cond: isNonNegative(n) && isPowerOfTwo16(c)
	// result: (And16 n (Const16 <t> [c-1]))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		// For a non-negative dividend and power-of-two divisor, the
		// modulus is just the low bits: n & (c-1).
		if !(isNonNegative(n) && isPowerOfTwo16(c)) {
			break
		}
		v.reset(OpAnd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(c - 1)
		v.AddArg2(n, v0)
		return true
	}
	// match: (Mod16 <t> n (Const16 [c]))
	// cond: c < 0 && c != -1<<15
	// result: (Mod16 <t> n (Const16 <t> [-c]))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		// Normalize a negative divisor to its positive counterpart.
		// c != -1<<15 excludes the minimum int16, whose negation overflows.
		if !(c < 0 && c != -1<<15) {
			break
		}
		v.reset(OpMod16)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(-c)
		v.AddArg2(n, v0)
		return true
	}
	// match: (Mod16 <t> x (Const16 [c]))
	// cond: x.Op != OpConst16 && (c > 0 || c == -1<<15)
	// result: (Sub16 x (Mul16 <t> (Div16 <t> x (Const16 <t> [c])) (Const16 <t> [c])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		if !(x.Op != OpConst16 && (c > 0 || c == -1<<15)) {
			break
		}
		// Express the modulus via division: x % c == x - (x/c)*c.
		v.reset(OpSub16)
		v0 := b.NewValue0(v.Pos, OpMul16, t)
		v1 := b.NewValue0(v.Pos, OpDiv16, t)
		v2 := b.NewValue0(v.Pos, OpConst16, t)
		v2.AuxInt = int16ToAuxInt(c)
		v1.AddArg2(x, v2)
		v0.AddArg2(v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpMod16u applies the generic rewrite rules for unsigned
// 16-bit modulus; it reports whether v was rewritten in place.
// Rules handled: constant folding, power-of-two masking, and lowering to
// x - (x/c)*c when the divisor admits an unsigned magic-number division.
func rewriteValuegeneric_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Mod16u (Const16 [c]) (Const16 [d]))
	// cond: d != 0
	// result: (Const16 [int16(uint16(c) % uint16(d))])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if v_1.Op != OpConst16 {
			break
		}
		d := auxIntToInt16(v_1.AuxInt)
		// Constant-fold with unsigned semantics; never fold a division by zero.
		if !(d != 0) {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(int16(uint16(c) % uint16(d)))
		return true
	}
	// match: (Mod16u <t> n (Const16 [c]))
	// cond: isPowerOfTwo16(c)
	// result: (And16 n (Const16 <t> [c-1]))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		// Unsigned modulus by a power of two is just the low bits: n & (c-1).
		if !(isPowerOfTwo16(c)) {
			break
		}
		v.reset(OpAnd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(c - 1)
		v.AddArg2(n, v0)
		return true
	}
	// match: (Mod16u <t> x (Const16 [c]))
	// cond: x.Op != OpConst16 && c > 0 && umagicOK16(c)
	// result: (Sub16 x (Mul16 <t> (Div16u <t> x (Const16 <t> [c])) (Const16 <t> [c])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		if !(x.Op != OpConst16 && c > 0 && umagicOK16(c)) {
			break
		}
		// Express the modulus via unsigned division: x % c == x - (x/c)*c.
		v.reset(OpSub16)
		v0 := b.NewValue0(v.Pos, OpMul16, t)
		v1 := b.NewValue0(v.Pos, OpDiv16u, t)
		v2 := b.NewValue0(v.Pos, OpConst16, t)
		v2.AuxInt = int16ToAuxInt(c)
		v1.AddArg2(x, v2)
		v0.AddArg2(v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpMod32 applies the generic rewrite rules for Mod32
// (signed 32-bit modulus). Each "for" block below attempts one rule: it
// matches v's operands against the rule's pattern (the "match:" comment),
// checks the rule's side condition ("cond:"), and on success rewrites v in
// place to the "result:" form and reports true. Rules are tried in order;
// returning false means no rule applied.
func rewriteValuegeneric_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// Rule: constant folding — both operands constant and the divisor nonzero.
	// match: (Mod32 (Const32 [c]) (Const32 [d]))
	// cond: d != 0
	// result: (Const32 [c % d])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		if v_1.Op != OpConst32 {
			break
		}
		d := auxIntToInt32(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(c % d)
		return true
	}
	// Rule: a known-non-negative value modulo a power of two becomes a mask.
	// match: (Mod32 <t> n (Const32 [c]))
	// cond: isNonNegative(n) && isPowerOfTwo32(c)
	// result: (And32 n (Const32 <t> [c-1]))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(isNonNegative(n) && isPowerOfTwo32(c)) {
			break
		}
		v.reset(OpAnd32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(c - 1)
		v.AddArg2(n, v0)
		return true
	}
	// Rule: normalize a negative divisor (other than the non-negatable
	// minimum int32) to its positive counterpart; in Go, n % c == n % -c.
	// match: (Mod32 <t> n (Const32 [c]))
	// cond: c < 0 && c != -1<<31
	// result: (Mod32 <t> n (Const32 <t> [-c]))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(c < 0 && c != -1<<31) {
			break
		}
		v.reset(OpMod32)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(-c)
		v.AddArg2(n, v0)
		return true
	}
	// Rule: general case — rewrite x % c as x - (x / c) * c so the division
	// can be strength-reduced elsewhere.
	// match: (Mod32 <t> x (Const32 [c]))
	// cond: x.Op != OpConst32 && (c > 0 || c == -1<<31)
	// result: (Sub32 x (Mul32 <t> (Div32 <t> x (Const32 <t> [c])) (Const32 <t> [c])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(x.Op != OpConst32 && (c > 0 || c == -1<<31)) {
			break
		}
		v.reset(OpSub32)
		v0 := b.NewValue0(v.Pos, OpMul32, t)
		v1 := b.NewValue0(v.Pos, OpDiv32, t)
		v2 := b.NewValue0(v.Pos, OpConst32, t)
		v2.AuxInt = int32ToAuxInt(c)
		v1.AddArg2(x, v2)
		v0.AddArg2(v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpMod32u applies the generic rewrite rules for Mod32u
// (unsigned 32-bit modulus). Each "for" block below attempts one rule: it
// matches v's operands against the rule's pattern (the "match:" comment),
// checks the rule's side condition ("cond:"), and on success rewrites v in
// place to the "result:" form and reports true. Rules are tried in order;
// returning false means no rule applied.
func rewriteValuegeneric_OpMod32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// Rule: constant folding — both operands constant and the divisor nonzero.
	// match: (Mod32u (Const32 [c]) (Const32 [d]))
	// cond: d != 0
	// result: (Const32 [int32(uint32(c) % uint32(d))])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		if v_1.Op != OpConst32 {
			break
		}
		d := auxIntToInt32(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(uint32(c) % uint32(d)))
		return true
	}
	// Rule: unsigned modulus by a power of two becomes a bitwise mask.
	// match: (Mod32u <t> n (Const32 [c]))
	// cond: isPowerOfTwo32(c)
	// result: (And32 n (Const32 <t> [c-1]))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(isPowerOfTwo32(c)) {
			break
		}
		v.reset(OpAnd32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(c - 1)
		v.AddArg2(n, v0)
		return true
	}
	// Rule: general case — rewrite x % c as x - (x / c) * c so the division
	// can be strength-reduced elsewhere (umagicOK32 gates the rewrite).
	// match: (Mod32u <t> x (Const32 [c]))
	// cond: x.Op != OpConst32 && c > 0 && umagicOK32(c)
	// result: (Sub32 x (Mul32 <t> (Div32u <t> x (Const32 <t> [c])) (Const32 <t> [c])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(x.Op != OpConst32 && c > 0 && umagicOK32(c)) {
			break
		}
		v.reset(OpSub32)
		v0 := b.NewValue0(v.Pos, OpMul32, t)
		v1 := b.NewValue0(v.Pos, OpDiv32u, t)
		v2 := b.NewValue0(v.Pos, OpConst32, t)
		v2.AuxInt = int32ToAuxInt(c)
		v1.AddArg2(x, v2)
		v0.AddArg2(v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpMod64 applies the generic rewrite rules for Mod64
// (signed 64-bit modulus). Each "for" block below attempts one rule: it
// matches v's operands against the rule's pattern (the "match:" comment),
// checks the rule's side condition ("cond:"), and on success rewrites v in
// place to the "result:" form and reports true. Rules are tried in order;
// returning false means no rule applied.
func rewriteValuegeneric_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// Rule: constant folding — both operands constant and the divisor nonzero.
	// match: (Mod64 (Const64 [c]) (Const64 [d]))
	// cond: d != 0
	// result: (Const64 [c % d])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(c % d)
		return true
	}
	// Rule: a known-non-negative value modulo a power of two becomes a mask.
	// match: (Mod64 <t> n (Const64 [c]))
	// cond: isNonNegative(n) && isPowerOfTwo64(c)
	// result: (And64 n (Const64 <t> [c-1]))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isNonNegative(n) && isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpAnd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(c - 1)
		v.AddArg2(n, v0)
		return true
	}
	// Rule: a known-non-negative value taken modulo the minimum int64 is
	// unchanged (0 <= n < 2^63, so n % (-1<<63) == n in Go semantics).
	// match: (Mod64 n (Const64 [-1<<63]))
	// cond: isNonNegative(n)
	// result: n
	for {
		n := v_0
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 || !(isNonNegative(n)) {
			break
		}
		v.copyOf(n)
		return true
	}
	// Rule: normalize a negative divisor (other than the non-negatable
	// minimum int64) to its positive counterpart; in Go, n % c == n % -c.
	// match: (Mod64 <t> n (Const64 [c]))
	// cond: c < 0 && c != -1<<63
	// result: (Mod64 <t> n (Const64 <t> [-c]))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(c < 0 && c != -1<<63) {
			break
		}
		v.reset(OpMod64)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(-c)
		v.AddArg2(n, v0)
		return true
	}
	// Rule: general case — rewrite x % c as x - (x / c) * c so the division
	// can be strength-reduced elsewhere.
	// match: (Mod64 <t> x (Const64 [c]))
	// cond: x.Op != OpConst64 && (c > 0 || c == -1<<63)
	// result: (Sub64 x (Mul64 <t> (Div64 <t> x (Const64 <t> [c])) (Const64 <t> [c])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(x.Op != OpConst64 && (c > 0 || c == -1<<63)) {
			break
		}
		v.reset(OpSub64)
		v0 := b.NewValue0(v.Pos, OpMul64, t)
		v1 := b.NewValue0(v.Pos, OpDiv64, t)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(c)
		v1.AddArg2(x, v2)
		v0.AddArg2(v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpMod64u applies the generic rewrite rules for Mod64u
// (unsigned 64-bit modulus). Each "for" block below attempts one rule: it
// matches v's operands against the rule's pattern (the "match:" comment),
// checks the rule's side condition ("cond:"), and on success rewrites v in
// place to the "result:" form and reports true. Rules are tried in order;
// returning false means no rule applied.
func rewriteValuegeneric_OpMod64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// Rule: constant folding — both operands constant and the divisor nonzero.
	// match: (Mod64u (Const64 [c]) (Const64 [d]))
	// cond: d != 0
	// result: (Const64 [int64(uint64(c) % uint64(d))])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
		return true
	}
	// Rule: unsigned modulus by a power of two becomes a bitwise mask.
	// match: (Mod64u <t> n (Const64 [c]))
	// cond: isPowerOfTwo64(c)
	// result: (And64 n (Const64 <t> [c-1]))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpAnd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(c - 1)
		v.AddArg2(n, v0)
		return true
	}
	// Rule: the divisor constant -1<<63 is 1<<63 as an unsigned value — also a
	// power of two — so mask with 1<<63-1. Handled separately because the
	// signed isPowerOfTwo64 check above cannot match a negative AuxInt.
	// match: (Mod64u <t> n (Const64 [-1<<63]))
	// result: (And64 n (Const64 <t> [1<<63-1]))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 {
			break
		}
		v.reset(OpAnd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(1<<63 - 1)
		v.AddArg2(n, v0)
		return true
	}
	// Rule: general case — rewrite x % c as x - (x / c) * c so the division
	// can be strength-reduced elsewhere (umagicOK64 gates the rewrite).
	// match: (Mod64u <t> x (Const64 [c]))
	// cond: x.Op != OpConst64 && c > 0 && umagicOK64(c)
	// result: (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(x.Op != OpConst64 && c > 0 && umagicOK64(c)) {
			break
		}
		v.reset(OpSub64)
		v0 := b.NewValue0(v.Pos, OpMul64, t)
		v1 := b.NewValue0(v.Pos, OpDiv64u, t)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(c)
		v1.AddArg2(x, v2)
		v0.AddArg2(v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpMod8 applies the generic rewrite rules for Mod8
// (signed 8-bit modulus). Each "for" block below attempts one rule: it
// matches v's operands against the rule's pattern (the "match:" comment),
// checks the rule's side condition ("cond:"), and on success rewrites v in
// place to the "result:" form and reports true. Rules are tried in order;
// returning false means no rule applied.
func rewriteValuegeneric_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// Rule: constant folding — both operands constant and the divisor nonzero.
	// match: (Mod8 (Const8 [c]) (Const8 [d]))
	// cond: d != 0
	// result: (Const8 [c % d])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		if v_1.Op != OpConst8 {
			break
		}
		d := auxIntToInt8(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(c % d)
		return true
	}
	// Rule: a known-non-negative value modulo a power of two becomes a mask.
	// match: (Mod8 <t> n (Const8 [c]))
	// cond: isNonNegative(n) && isPowerOfTwo8(c)
	// result: (And8 n (Const8 <t> [c-1]))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		if !(isNonNegative(n) && isPowerOfTwo8(c)) {
			break
		}
		v.reset(OpAnd8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(c - 1)
		v.AddArg2(n, v0)
		return true
	}
	// Rule: normalize a negative divisor (other than the non-negatable
	// minimum int8) to its positive counterpart; in Go, n % c == n % -c.
	// match: (Mod8 <t> n (Const8 [c]))
	// cond: c < 0 && c != -1<<7
	// result: (Mod8 <t> n (Const8 <t> [-c]))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		if !(c < 0 && c != -1<<7) {
			break
		}
		v.reset(OpMod8)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(-c)
		v.AddArg2(n, v0)
		return true
	}
	// Rule: general case — rewrite x % c as x - (x / c) * c so the division
	// can be strength-reduced elsewhere.
	// match: (Mod8 <t> x (Const8 [c]))
	// cond: x.Op != OpConst8 && (c > 0 || c == -1<<7)
	// result: (Sub8 x (Mul8 <t> (Div8 <t> x (Const8 <t> [c])) (Const8 <t> [c])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		if !(x.Op != OpConst8 && (c > 0 || c == -1<<7)) {
			break
		}
		v.reset(OpSub8)
		v0 := b.NewValue0(v.Pos, OpMul8, t)
		v1 := b.NewValue0(v.Pos, OpDiv8, t)
		v2 := b.NewValue0(v.Pos, OpConst8, t)
		v2.AuxInt = int8ToAuxInt(c)
		v1.AddArg2(x, v2)
		v0.AddArg2(v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpMod8u applies the generic rewrite rules for Mod8u
// (unsigned 8-bit modulus). Each "for" block below attempts one rule: it
// matches v's operands against the rule's pattern (the "match:" comment),
// checks the rule's side condition ("cond:"), and on success rewrites v in
// place to the "result:" form and reports true. Rules are tried in order;
// returning false means no rule applied.
func rewriteValuegeneric_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// Rule: constant folding — both operands constant and the divisor nonzero.
	// match: (Mod8u (Const8 [c]) (Const8 [d]))
	// cond: d != 0
	// result: (Const8 [int8(uint8(c) % uint8(d))])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		if v_1.Op != OpConst8 {
			break
		}
		d := auxIntToInt8(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(int8(uint8(c) % uint8(d)))
		return true
	}
	// Rule: unsigned modulus by a power of two becomes a bitwise mask.
	// match: (Mod8u <t> n (Const8 [c]))
	// cond: isPowerOfTwo8(c)
	// result: (And8 n (Const8 <t> [c-1]))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		if !(isPowerOfTwo8(c)) {
			break
		}
		v.reset(OpAnd8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(c - 1)
		v.AddArg2(n, v0)
		return true
	}
	// Rule: general case — rewrite x % c as x - (x / c) * c so the division
	// can be strength-reduced elsewhere (umagicOK8 gates the rewrite).
	// match: (Mod8u <t> x (Const8 [c]))
	// cond: x.Op != OpConst8 && c > 0 && umagicOK8(c)
	// result: (Sub8 x (Mul8 <t> (Div8u <t> x (Const8 <t> [c])) (Const8 <t> [c])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		if !(x.Op != OpConst8 && c > 0 && umagicOK8(c)) {
			break
		}
		v.reset(OpSub8)
		v0 := b.NewValue0(v.Pos, OpMul8, t)
		v1 := b.NewValue0(v.Pos, OpDiv8u, t)
		v2 := b.NewValue0(v.Pos, OpConst8, t)
		v2.AuxInt = int8ToAuxInt(c)
		v1.AddArg2(x, v2)
		v0.AddArg2(v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
15903func rewriteValuegeneric_OpMove(v *Value) bool {
15904	v_2 := v.Args[2]
15905	v_1 := v.Args[1]
15906	v_0 := v.Args[0]
15907	b := v.Block
15908	config := b.Func.Config
15909	// match: (Move {t} [n] dst1 src mem:(Zero {t} [n] dst2 _))
15910	// cond: isSamePtr(src, dst2)
15911	// result: (Zero {t} [n] dst1 mem)
15912	for {
15913		n := auxIntToInt64(v.AuxInt)
15914		t := auxToType(v.Aux)
15915		dst1 := v_0
15916		src := v_1
15917		mem := v_2
15918		if mem.Op != OpZero || auxIntToInt64(mem.AuxInt) != n || auxToType(mem.Aux) != t {
15919			break
15920		}
15921		dst2 := mem.Args[0]
15922		if !(isSamePtr(src, dst2)) {
15923			break
15924		}
15925		v.reset(OpZero)
15926		v.AuxInt = int64ToAuxInt(n)
15927		v.Aux = typeToAux(t)
15928		v.AddArg2(dst1, mem)
15929		return true
15930	}
15931	// match: (Move {t} [n] dst1 src mem:(VarDef (Zero {t} [n] dst0 _)))
15932	// cond: isSamePtr(src, dst0)
15933	// result: (Zero {t} [n] dst1 mem)
15934	for {
15935		n := auxIntToInt64(v.AuxInt)
15936		t := auxToType(v.Aux)
15937		dst1 := v_0
15938		src := v_1
15939		mem := v_2
15940		if mem.Op != OpVarDef {
15941			break
15942		}
15943		mem_0 := mem.Args[0]
15944		if mem_0.Op != OpZero || auxIntToInt64(mem_0.AuxInt) != n || auxToType(mem_0.Aux) != t {
15945			break
15946		}
15947		dst0 := mem_0.Args[0]
15948		if !(isSamePtr(src, dst0)) {
15949			break
15950		}
15951		v.reset(OpZero)
15952		v.AuxInt = int64ToAuxInt(n)
15953		v.Aux = typeToAux(t)
15954		v.AddArg2(dst1, mem)
15955		return true
15956	}
15957	// match: (Move {t} [n] dst (Addr {sym} (SB)) mem)
15958	// cond: symIsROZero(sym)
15959	// result: (Zero {t} [n] dst mem)
15960	for {
15961		n := auxIntToInt64(v.AuxInt)
15962		t := auxToType(v.Aux)
15963		dst := v_0
15964		if v_1.Op != OpAddr {
15965			break
15966		}
15967		sym := auxToSym(v_1.Aux)
15968		v_1_0 := v_1.Args[0]
15969		if v_1_0.Op != OpSB {
15970			break
15971		}
15972		mem := v_2
15973		if !(symIsROZero(sym)) {
15974			break
15975		}
15976		v.reset(OpZero)
15977		v.AuxInt = int64ToAuxInt(n)
15978		v.Aux = typeToAux(t)
15979		v.AddArg2(dst, mem)
15980		return true
15981	}
15982	// match: (Move {t1} [n] dst1 src1 store:(Store {t2} op:(OffPtr [o2] dst2) _ mem))
15983	// cond: isSamePtr(dst1, dst2) && store.Uses == 1 && n >= o2 + t2.Size() && disjoint(src1, n, op, t2.Size()) && clobber(store)
15984	// result: (Move {t1} [n] dst1 src1 mem)
15985	for {
15986		n := auxIntToInt64(v.AuxInt)
15987		t1 := auxToType(v.Aux)
15988		dst1 := v_0
15989		src1 := v_1
15990		store := v_2
15991		if store.Op != OpStore {
15992			break
15993		}
15994		t2 := auxToType(store.Aux)
15995		mem := store.Args[2]
15996		op := store.Args[0]
15997		if op.Op != OpOffPtr {
15998			break
15999		}
16000		o2 := auxIntToInt64(op.AuxInt)
16001		dst2 := op.Args[0]
16002		if !(isSamePtr(dst1, dst2) && store.Uses == 1 && n >= o2+t2.Size() && disjoint(src1, n, op, t2.Size()) && clobber(store)) {
16003			break
16004		}
16005		v.reset(OpMove)
16006		v.AuxInt = int64ToAuxInt(n)
16007		v.Aux = typeToAux(t1)
16008		v.AddArg3(dst1, src1, mem)
16009		return true
16010	}
16011	// match: (Move {t} [n] dst1 src1 move:(Move {t} [n] dst2 _ mem))
16012	// cond: move.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move)
16013	// result: (Move {t} [n] dst1 src1 mem)
16014	for {
16015		n := auxIntToInt64(v.AuxInt)
16016		t := auxToType(v.Aux)
16017		dst1 := v_0
16018		src1 := v_1
16019		move := v_2
16020		if move.Op != OpMove || auxIntToInt64(move.AuxInt) != n || auxToType(move.Aux) != t {
16021			break
16022		}
16023		mem := move.Args[2]
16024		dst2 := move.Args[0]
16025		if !(move.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move)) {
16026			break
16027		}
16028		v.reset(OpMove)
16029		v.AuxInt = int64ToAuxInt(n)
16030		v.Aux = typeToAux(t)
16031		v.AddArg3(dst1, src1, mem)
16032		return true
16033	}
16034	// match: (Move {t} [n] dst1 src1 vardef:(VarDef {x} move:(Move {t} [n] dst2 _ mem)))
16035	// cond: move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move, vardef)
16036	// result: (Move {t} [n] dst1 src1 (VarDef {x} mem))
16037	for {
16038		n := auxIntToInt64(v.AuxInt)
16039		t := auxToType(v.Aux)
16040		dst1 := v_0
16041		src1 := v_1
16042		vardef := v_2
16043		if vardef.Op != OpVarDef {
16044			break
16045		}
16046		x := auxToSym(vardef.Aux)
16047		move := vardef.Args[0]
16048		if move.Op != OpMove || auxIntToInt64(move.AuxInt) != n || auxToType(move.Aux) != t {
16049			break
16050		}
16051		mem := move.Args[2]
16052		dst2 := move.Args[0]
16053		if !(move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move, vardef)) {
16054			break
16055		}
16056		v.reset(OpMove)
16057		v.AuxInt = int64ToAuxInt(n)
16058		v.Aux = typeToAux(t)
16059		v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
16060		v0.Aux = symToAux(x)
16061		v0.AddArg(mem)
16062		v.AddArg3(dst1, src1, v0)
16063		return true
16064	}
16065	// match: (Move {t} [n] dst1 src1 zero:(Zero {t} [n] dst2 mem))
16066	// cond: zero.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero)
16067	// result: (Move {t} [n] dst1 src1 mem)
16068	for {
16069		n := auxIntToInt64(v.AuxInt)
16070		t := auxToType(v.Aux)
16071		dst1 := v_0
16072		src1 := v_1
16073		zero := v_2
16074		if zero.Op != OpZero || auxIntToInt64(zero.AuxInt) != n || auxToType(zero.Aux) != t {
16075			break
16076		}
16077		mem := zero.Args[1]
16078		dst2 := zero.Args[0]
16079		if !(zero.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero)) {
16080			break
16081		}
16082		v.reset(OpMove)
16083		v.AuxInt = int64ToAuxInt(n)
16084		v.Aux = typeToAux(t)
16085		v.AddArg3(dst1, src1, mem)
16086		return true
16087	}
16088	// match: (Move {t} [n] dst1 src1 vardef:(VarDef {x} zero:(Zero {t} [n] dst2 mem)))
16089	// cond: zero.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero, vardef)
16090	// result: (Move {t} [n] dst1 src1 (VarDef {x} mem))
16091	for {
16092		n := auxIntToInt64(v.AuxInt)
16093		t := auxToType(v.Aux)
16094		dst1 := v_0
16095		src1 := v_1
16096		vardef := v_2
16097		if vardef.Op != OpVarDef {
16098			break
16099		}
16100		x := auxToSym(vardef.Aux)
16101		zero := vardef.Args[0]
16102		if zero.Op != OpZero || auxIntToInt64(zero.AuxInt) != n || auxToType(zero.Aux) != t {
16103			break
16104		}
16105		mem := zero.Args[1]
16106		dst2 := zero.Args[0]
16107		if !(zero.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero, vardef)) {
16108			break
16109		}
16110		v.reset(OpMove)
16111		v.AuxInt = int64ToAuxInt(n)
16112		v.Aux = typeToAux(t)
16113		v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
16114		v0.Aux = symToAux(x)
16115		v0.AddArg(mem)
16116		v.AddArg3(dst1, src1, v0)
16117		return true
16118	}
16119	// match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [0] p3) d2 _)))
16120	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && o2 == t3.Size() && n == t2.Size() + t3.Size()
16121	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [0] dst) d2 mem))
16122	for {
16123		n := auxIntToInt64(v.AuxInt)
16124		t1 := auxToType(v.Aux)
16125		dst := v_0
16126		p1 := v_1
16127		mem := v_2
16128		if mem.Op != OpStore {
16129			break
16130		}
16131		t2 := auxToType(mem.Aux)
16132		_ = mem.Args[2]
16133		op2 := mem.Args[0]
16134		if op2.Op != OpOffPtr {
16135			break
16136		}
16137		tt2 := op2.Type
16138		o2 := auxIntToInt64(op2.AuxInt)
16139		p2 := op2.Args[0]
16140		d1 := mem.Args[1]
16141		mem_2 := mem.Args[2]
16142		if mem_2.Op != OpStore {
16143			break
16144		}
16145		t3 := auxToType(mem_2.Aux)
16146		d2 := mem_2.Args[1]
16147		op3 := mem_2.Args[0]
16148		if op3.Op != OpOffPtr {
16149			break
16150		}
16151		tt3 := op3.Type
16152		if auxIntToInt64(op3.AuxInt) != 0 {
16153			break
16154		}
16155		p3 := op3.Args[0]
16156		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && o2 == t3.Size() && n == t2.Size()+t3.Size()) {
16157			break
16158		}
16159		v.reset(OpStore)
16160		v.Aux = typeToAux(t2)
16161		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16162		v0.AuxInt = int64ToAuxInt(o2)
16163		v0.AddArg(dst)
16164		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16165		v1.Aux = typeToAux(t3)
16166		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
16167		v2.AuxInt = int64ToAuxInt(0)
16168		v2.AddArg(dst)
16169		v1.AddArg3(v2, d2, mem)
16170		v.AddArg3(v0, d1, v1)
16171		return true
16172	}
16173	// match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [o3] p3) d2 (Store {t4} op4:(OffPtr <tt4> [0] p4) d3 _))))
16174	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size() + t3.Size() + t4.Size()
16175	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [0] dst) d3 mem)))
16176	for {
16177		n := auxIntToInt64(v.AuxInt)
16178		t1 := auxToType(v.Aux)
16179		dst := v_0
16180		p1 := v_1
16181		mem := v_2
16182		if mem.Op != OpStore {
16183			break
16184		}
16185		t2 := auxToType(mem.Aux)
16186		_ = mem.Args[2]
16187		op2 := mem.Args[0]
16188		if op2.Op != OpOffPtr {
16189			break
16190		}
16191		tt2 := op2.Type
16192		o2 := auxIntToInt64(op2.AuxInt)
16193		p2 := op2.Args[0]
16194		d1 := mem.Args[1]
16195		mem_2 := mem.Args[2]
16196		if mem_2.Op != OpStore {
16197			break
16198		}
16199		t3 := auxToType(mem_2.Aux)
16200		_ = mem_2.Args[2]
16201		op3 := mem_2.Args[0]
16202		if op3.Op != OpOffPtr {
16203			break
16204		}
16205		tt3 := op3.Type
16206		o3 := auxIntToInt64(op3.AuxInt)
16207		p3 := op3.Args[0]
16208		d2 := mem_2.Args[1]
16209		mem_2_2 := mem_2.Args[2]
16210		if mem_2_2.Op != OpStore {
16211			break
16212		}
16213		t4 := auxToType(mem_2_2.Aux)
16214		d3 := mem_2_2.Args[1]
16215		op4 := mem_2_2.Args[0]
16216		if op4.Op != OpOffPtr {
16217			break
16218		}
16219		tt4 := op4.Type
16220		if auxIntToInt64(op4.AuxInt) != 0 {
16221			break
16222		}
16223		p4 := op4.Args[0]
16224		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size()+t3.Size()+t4.Size()) {
16225			break
16226		}
16227		v.reset(OpStore)
16228		v.Aux = typeToAux(t2)
16229		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16230		v0.AuxInt = int64ToAuxInt(o2)
16231		v0.AddArg(dst)
16232		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16233		v1.Aux = typeToAux(t3)
16234		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
16235		v2.AuxInt = int64ToAuxInt(o3)
16236		v2.AddArg(dst)
16237		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16238		v3.Aux = typeToAux(t4)
16239		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
16240		v4.AuxInt = int64ToAuxInt(0)
16241		v4.AddArg(dst)
16242		v3.AddArg3(v4, d3, mem)
16243		v1.AddArg3(v2, d2, v3)
16244		v.AddArg3(v0, d1, v1)
16245		return true
16246	}
16247	// match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [o3] p3) d2 (Store {t4} op4:(OffPtr <tt4> [o4] p4) d3 (Store {t5} op5:(OffPtr <tt5> [0] p5) d4 _)))))
16248	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == t5.Size() && o3-o4 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size() + t3.Size() + t4.Size() + t5.Size()
16249	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [0] dst) d4 mem))))
16250	for {
16251		n := auxIntToInt64(v.AuxInt)
16252		t1 := auxToType(v.Aux)
16253		dst := v_0
16254		p1 := v_1
16255		mem := v_2
16256		if mem.Op != OpStore {
16257			break
16258		}
16259		t2 := auxToType(mem.Aux)
16260		_ = mem.Args[2]
16261		op2 := mem.Args[0]
16262		if op2.Op != OpOffPtr {
16263			break
16264		}
16265		tt2 := op2.Type
16266		o2 := auxIntToInt64(op2.AuxInt)
16267		p2 := op2.Args[0]
16268		d1 := mem.Args[1]
16269		mem_2 := mem.Args[2]
16270		if mem_2.Op != OpStore {
16271			break
16272		}
16273		t3 := auxToType(mem_2.Aux)
16274		_ = mem_2.Args[2]
16275		op3 := mem_2.Args[0]
16276		if op3.Op != OpOffPtr {
16277			break
16278		}
16279		tt3 := op3.Type
16280		o3 := auxIntToInt64(op3.AuxInt)
16281		p3 := op3.Args[0]
16282		d2 := mem_2.Args[1]
16283		mem_2_2 := mem_2.Args[2]
16284		if mem_2_2.Op != OpStore {
16285			break
16286		}
16287		t4 := auxToType(mem_2_2.Aux)
16288		_ = mem_2_2.Args[2]
16289		op4 := mem_2_2.Args[0]
16290		if op4.Op != OpOffPtr {
16291			break
16292		}
16293		tt4 := op4.Type
16294		o4 := auxIntToInt64(op4.AuxInt)
16295		p4 := op4.Args[0]
16296		d3 := mem_2_2.Args[1]
16297		mem_2_2_2 := mem_2_2.Args[2]
16298		if mem_2_2_2.Op != OpStore {
16299			break
16300		}
16301		t5 := auxToType(mem_2_2_2.Aux)
16302		d4 := mem_2_2_2.Args[1]
16303		op5 := mem_2_2_2.Args[0]
16304		if op5.Op != OpOffPtr {
16305			break
16306		}
16307		tt5 := op5.Type
16308		if auxIntToInt64(op5.AuxInt) != 0 {
16309			break
16310		}
16311		p5 := op5.Args[0]
16312		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == t5.Size() && o3-o4 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size()+t3.Size()+t4.Size()+t5.Size()) {
16313			break
16314		}
16315		v.reset(OpStore)
16316		v.Aux = typeToAux(t2)
16317		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16318		v0.AuxInt = int64ToAuxInt(o2)
16319		v0.AddArg(dst)
16320		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16321		v1.Aux = typeToAux(t3)
16322		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
16323		v2.AuxInt = int64ToAuxInt(o3)
16324		v2.AddArg(dst)
16325		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16326		v3.Aux = typeToAux(t4)
16327		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
16328		v4.AuxInt = int64ToAuxInt(o4)
16329		v4.AddArg(dst)
16330		v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16331		v5.Aux = typeToAux(t5)
16332		v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
16333		v6.AuxInt = int64ToAuxInt(0)
16334		v6.AddArg(dst)
16335		v5.AddArg3(v6, d4, mem)
16336		v3.AddArg3(v4, d3, v5)
16337		v1.AddArg3(v2, d2, v3)
16338		v.AddArg3(v0, d1, v1)
16339		return true
16340	}
16341	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [0] p3) d2 _))))
16342	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && o2 == t3.Size() && n == t2.Size() + t3.Size()
16343	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [0] dst) d2 mem))
16344	for {
16345		n := auxIntToInt64(v.AuxInt)
16346		t1 := auxToType(v.Aux)
16347		dst := v_0
16348		p1 := v_1
16349		mem := v_2
16350		if mem.Op != OpVarDef {
16351			break
16352		}
16353		mem_0 := mem.Args[0]
16354		if mem_0.Op != OpStore {
16355			break
16356		}
16357		t2 := auxToType(mem_0.Aux)
16358		_ = mem_0.Args[2]
16359		op2 := mem_0.Args[0]
16360		if op2.Op != OpOffPtr {
16361			break
16362		}
16363		tt2 := op2.Type
16364		o2 := auxIntToInt64(op2.AuxInt)
16365		p2 := op2.Args[0]
16366		d1 := mem_0.Args[1]
16367		mem_0_2 := mem_0.Args[2]
16368		if mem_0_2.Op != OpStore {
16369			break
16370		}
16371		t3 := auxToType(mem_0_2.Aux)
16372		d2 := mem_0_2.Args[1]
16373		op3 := mem_0_2.Args[0]
16374		if op3.Op != OpOffPtr {
16375			break
16376		}
16377		tt3 := op3.Type
16378		if auxIntToInt64(op3.AuxInt) != 0 {
16379			break
16380		}
16381		p3 := op3.Args[0]
16382		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && o2 == t3.Size() && n == t2.Size()+t3.Size()) {
16383			break
16384		}
16385		v.reset(OpStore)
16386		v.Aux = typeToAux(t2)
16387		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16388		v0.AuxInt = int64ToAuxInt(o2)
16389		v0.AddArg(dst)
16390		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16391		v1.Aux = typeToAux(t3)
16392		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
16393		v2.AuxInt = int64ToAuxInt(0)
16394		v2.AddArg(dst)
16395		v1.AddArg3(v2, d2, mem)
16396		v.AddArg3(v0, d1, v1)
16397		return true
16398	}
16399	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [o3] p3) d2 (Store {t4} op4:(OffPtr <tt4> [0] p4) d3 _)))))
16400	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size() + t3.Size() + t4.Size()
16401	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [0] dst) d3 mem)))
16402	for {
16403		n := auxIntToInt64(v.AuxInt)
16404		t1 := auxToType(v.Aux)
16405		dst := v_0
16406		p1 := v_1
16407		mem := v_2
16408		if mem.Op != OpVarDef {
16409			break
16410		}
16411		mem_0 := mem.Args[0]
16412		if mem_0.Op != OpStore {
16413			break
16414		}
16415		t2 := auxToType(mem_0.Aux)
16416		_ = mem_0.Args[2]
16417		op2 := mem_0.Args[0]
16418		if op2.Op != OpOffPtr {
16419			break
16420		}
16421		tt2 := op2.Type
16422		o2 := auxIntToInt64(op2.AuxInt)
16423		p2 := op2.Args[0]
16424		d1 := mem_0.Args[1]
16425		mem_0_2 := mem_0.Args[2]
16426		if mem_0_2.Op != OpStore {
16427			break
16428		}
16429		t3 := auxToType(mem_0_2.Aux)
16430		_ = mem_0_2.Args[2]
16431		op3 := mem_0_2.Args[0]
16432		if op3.Op != OpOffPtr {
16433			break
16434		}
16435		tt3 := op3.Type
16436		o3 := auxIntToInt64(op3.AuxInt)
16437		p3 := op3.Args[0]
16438		d2 := mem_0_2.Args[1]
16439		mem_0_2_2 := mem_0_2.Args[2]
16440		if mem_0_2_2.Op != OpStore {
16441			break
16442		}
16443		t4 := auxToType(mem_0_2_2.Aux)
16444		d3 := mem_0_2_2.Args[1]
16445		op4 := mem_0_2_2.Args[0]
16446		if op4.Op != OpOffPtr {
16447			break
16448		}
16449		tt4 := op4.Type
16450		if auxIntToInt64(op4.AuxInt) != 0 {
16451			break
16452		}
16453		p4 := op4.Args[0]
16454		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size()+t3.Size()+t4.Size()) {
16455			break
16456		}
16457		v.reset(OpStore)
16458		v.Aux = typeToAux(t2)
16459		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16460		v0.AuxInt = int64ToAuxInt(o2)
16461		v0.AddArg(dst)
16462		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16463		v1.Aux = typeToAux(t3)
16464		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
16465		v2.AuxInt = int64ToAuxInt(o3)
16466		v2.AddArg(dst)
16467		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16468		v3.Aux = typeToAux(t4)
16469		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
16470		v4.AuxInt = int64ToAuxInt(0)
16471		v4.AddArg(dst)
16472		v3.AddArg3(v4, d3, mem)
16473		v1.AddArg3(v2, d2, v3)
16474		v.AddArg3(v0, d1, v1)
16475		return true
16476	}
16477	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [o3] p3) d2 (Store {t4} op4:(OffPtr <tt4> [o4] p4) d3 (Store {t5} op5:(OffPtr <tt5> [0] p5) d4 _))))))
16478	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == t5.Size() && o3-o4 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size() + t3.Size() + t4.Size() + t5.Size()
16479	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [0] dst) d4 mem))))
16480	for {
16481		n := auxIntToInt64(v.AuxInt)
16482		t1 := auxToType(v.Aux)
16483		dst := v_0
16484		p1 := v_1
16485		mem := v_2
16486		if mem.Op != OpVarDef {
16487			break
16488		}
16489		mem_0 := mem.Args[0]
16490		if mem_0.Op != OpStore {
16491			break
16492		}
16493		t2 := auxToType(mem_0.Aux)
16494		_ = mem_0.Args[2]
16495		op2 := mem_0.Args[0]
16496		if op2.Op != OpOffPtr {
16497			break
16498		}
16499		tt2 := op2.Type
16500		o2 := auxIntToInt64(op2.AuxInt)
16501		p2 := op2.Args[0]
16502		d1 := mem_0.Args[1]
16503		mem_0_2 := mem_0.Args[2]
16504		if mem_0_2.Op != OpStore {
16505			break
16506		}
16507		t3 := auxToType(mem_0_2.Aux)
16508		_ = mem_0_2.Args[2]
16509		op3 := mem_0_2.Args[0]
16510		if op3.Op != OpOffPtr {
16511			break
16512		}
16513		tt3 := op3.Type
16514		o3 := auxIntToInt64(op3.AuxInt)
16515		p3 := op3.Args[0]
16516		d2 := mem_0_2.Args[1]
16517		mem_0_2_2 := mem_0_2.Args[2]
16518		if mem_0_2_2.Op != OpStore {
16519			break
16520		}
16521		t4 := auxToType(mem_0_2_2.Aux)
16522		_ = mem_0_2_2.Args[2]
16523		op4 := mem_0_2_2.Args[0]
16524		if op4.Op != OpOffPtr {
16525			break
16526		}
16527		tt4 := op4.Type
16528		o4 := auxIntToInt64(op4.AuxInt)
16529		p4 := op4.Args[0]
16530		d3 := mem_0_2_2.Args[1]
16531		mem_0_2_2_2 := mem_0_2_2.Args[2]
16532		if mem_0_2_2_2.Op != OpStore {
16533			break
16534		}
16535		t5 := auxToType(mem_0_2_2_2.Aux)
16536		d4 := mem_0_2_2_2.Args[1]
16537		op5 := mem_0_2_2_2.Args[0]
16538		if op5.Op != OpOffPtr {
16539			break
16540		}
16541		tt5 := op5.Type
16542		if auxIntToInt64(op5.AuxInt) != 0 {
16543			break
16544		}
16545		p5 := op5.Args[0]
16546		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == t5.Size() && o3-o4 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size()+t3.Size()+t4.Size()+t5.Size()) {
16547			break
16548		}
16549		v.reset(OpStore)
16550		v.Aux = typeToAux(t2)
16551		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16552		v0.AuxInt = int64ToAuxInt(o2)
16553		v0.AddArg(dst)
16554		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16555		v1.Aux = typeToAux(t3)
16556		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
16557		v2.AuxInt = int64ToAuxInt(o3)
16558		v2.AddArg(dst)
16559		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16560		v3.Aux = typeToAux(t4)
16561		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
16562		v4.AuxInt = int64ToAuxInt(o4)
16563		v4.AddArg(dst)
16564		v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16565		v5.Aux = typeToAux(t5)
16566		v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
16567		v6.AuxInt = int64ToAuxInt(0)
16568		v6.AddArg(dst)
16569		v5.AddArg3(v6, d4, mem)
16570		v3.AddArg3(v4, d3, v5)
16571		v1.AddArg3(v2, d2, v3)
16572		v.AddArg3(v0, d1, v1)
16573		return true
16574	}
16575	// match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Zero {t3} [n] p3 _)))
16576	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && n >= o2 + t2.Size()
16577	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Zero {t1} [n] dst mem))
16578	for {
16579		n := auxIntToInt64(v.AuxInt)
16580		t1 := auxToType(v.Aux)
16581		dst := v_0
16582		p1 := v_1
16583		mem := v_2
16584		if mem.Op != OpStore {
16585			break
16586		}
16587		t2 := auxToType(mem.Aux)
16588		_ = mem.Args[2]
16589		op2 := mem.Args[0]
16590		if op2.Op != OpOffPtr {
16591			break
16592		}
16593		tt2 := op2.Type
16594		o2 := auxIntToInt64(op2.AuxInt)
16595		p2 := op2.Args[0]
16596		d1 := mem.Args[1]
16597		mem_2 := mem.Args[2]
16598		if mem_2.Op != OpZero || auxIntToInt64(mem_2.AuxInt) != n {
16599			break
16600		}
16601		t3 := auxToType(mem_2.Aux)
16602		p3 := mem_2.Args[0]
16603		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && n >= o2+t2.Size()) {
16604			break
16605		}
16606		v.reset(OpStore)
16607		v.Aux = typeToAux(t2)
16608		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16609		v0.AuxInt = int64ToAuxInt(o2)
16610		v0.AddArg(dst)
16611		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
16612		v1.AuxInt = int64ToAuxInt(n)
16613		v1.Aux = typeToAux(t1)
16614		v1.AddArg2(dst, mem)
16615		v.AddArg3(v0, d1, v1)
16616		return true
16617	}
16618	// match: (Move {t1} [n] dst p1 mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Zero {t4} [n] p4 _))))
16619	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && n >= o2 + t2.Size() && n >= o3 + t3.Size()
16620	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Zero {t1} [n] dst mem)))
16621	for {
16622		n := auxIntToInt64(v.AuxInt)
16623		t1 := auxToType(v.Aux)
16624		dst := v_0
16625		p1 := v_1
16626		mem := v_2
16627		if mem.Op != OpStore {
16628			break
16629		}
16630		t2 := auxToType(mem.Aux)
16631		_ = mem.Args[2]
16632		mem_0 := mem.Args[0]
16633		if mem_0.Op != OpOffPtr {
16634			break
16635		}
16636		tt2 := mem_0.Type
16637		o2 := auxIntToInt64(mem_0.AuxInt)
16638		p2 := mem_0.Args[0]
16639		d1 := mem.Args[1]
16640		mem_2 := mem.Args[2]
16641		if mem_2.Op != OpStore {
16642			break
16643		}
16644		t3 := auxToType(mem_2.Aux)
16645		_ = mem_2.Args[2]
16646		mem_2_0 := mem_2.Args[0]
16647		if mem_2_0.Op != OpOffPtr {
16648			break
16649		}
16650		tt3 := mem_2_0.Type
16651		o3 := auxIntToInt64(mem_2_0.AuxInt)
16652		p3 := mem_2_0.Args[0]
16653		d2 := mem_2.Args[1]
16654		mem_2_2 := mem_2.Args[2]
16655		if mem_2_2.Op != OpZero || auxIntToInt64(mem_2_2.AuxInt) != n {
16656			break
16657		}
16658		t4 := auxToType(mem_2_2.Aux)
16659		p4 := mem_2_2.Args[0]
16660		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && n >= o2+t2.Size() && n >= o3+t3.Size()) {
16661			break
16662		}
16663		v.reset(OpStore)
16664		v.Aux = typeToAux(t2)
16665		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16666		v0.AuxInt = int64ToAuxInt(o2)
16667		v0.AddArg(dst)
16668		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16669		v1.Aux = typeToAux(t3)
16670		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
16671		v2.AuxInt = int64ToAuxInt(o3)
16672		v2.AddArg(dst)
16673		v3 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
16674		v3.AuxInt = int64ToAuxInt(n)
16675		v3.Aux = typeToAux(t1)
16676		v3.AddArg2(dst, mem)
16677		v1.AddArg3(v2, d2, v3)
16678		v.AddArg3(v0, d1, v1)
16679		return true
16680	}
16681	// match: (Move {t1} [n] dst p1 mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Zero {t5} [n] p5 _)))))
16682	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2 + t2.Size() && n >= o3 + t3.Size() && n >= o4 + t4.Size()
16683	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Zero {t1} [n] dst mem))))
16684	for {
16685		n := auxIntToInt64(v.AuxInt)
16686		t1 := auxToType(v.Aux)
16687		dst := v_0
16688		p1 := v_1
16689		mem := v_2
16690		if mem.Op != OpStore {
16691			break
16692		}
16693		t2 := auxToType(mem.Aux)
16694		_ = mem.Args[2]
16695		mem_0 := mem.Args[0]
16696		if mem_0.Op != OpOffPtr {
16697			break
16698		}
16699		tt2 := mem_0.Type
16700		o2 := auxIntToInt64(mem_0.AuxInt)
16701		p2 := mem_0.Args[0]
16702		d1 := mem.Args[1]
16703		mem_2 := mem.Args[2]
16704		if mem_2.Op != OpStore {
16705			break
16706		}
16707		t3 := auxToType(mem_2.Aux)
16708		_ = mem_2.Args[2]
16709		mem_2_0 := mem_2.Args[0]
16710		if mem_2_0.Op != OpOffPtr {
16711			break
16712		}
16713		tt3 := mem_2_0.Type
16714		o3 := auxIntToInt64(mem_2_0.AuxInt)
16715		p3 := mem_2_0.Args[0]
16716		d2 := mem_2.Args[1]
16717		mem_2_2 := mem_2.Args[2]
16718		if mem_2_2.Op != OpStore {
16719			break
16720		}
16721		t4 := auxToType(mem_2_2.Aux)
16722		_ = mem_2_2.Args[2]
16723		mem_2_2_0 := mem_2_2.Args[0]
16724		if mem_2_2_0.Op != OpOffPtr {
16725			break
16726		}
16727		tt4 := mem_2_2_0.Type
16728		o4 := auxIntToInt64(mem_2_2_0.AuxInt)
16729		p4 := mem_2_2_0.Args[0]
16730		d3 := mem_2_2.Args[1]
16731		mem_2_2_2 := mem_2_2.Args[2]
16732		if mem_2_2_2.Op != OpZero || auxIntToInt64(mem_2_2_2.AuxInt) != n {
16733			break
16734		}
16735		t5 := auxToType(mem_2_2_2.Aux)
16736		p5 := mem_2_2_2.Args[0]
16737		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2+t2.Size() && n >= o3+t3.Size() && n >= o4+t4.Size()) {
16738			break
16739		}
16740		v.reset(OpStore)
16741		v.Aux = typeToAux(t2)
16742		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16743		v0.AuxInt = int64ToAuxInt(o2)
16744		v0.AddArg(dst)
16745		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16746		v1.Aux = typeToAux(t3)
16747		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
16748		v2.AuxInt = int64ToAuxInt(o3)
16749		v2.AddArg(dst)
16750		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16751		v3.Aux = typeToAux(t4)
16752		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
16753		v4.AuxInt = int64ToAuxInt(o4)
16754		v4.AddArg(dst)
16755		v5 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
16756		v5.AuxInt = int64ToAuxInt(n)
16757		v5.Aux = typeToAux(t1)
16758		v5.AddArg2(dst, mem)
16759		v3.AddArg3(v4, d3, v5)
16760		v1.AddArg3(v2, d2, v3)
16761		v.AddArg3(v0, d1, v1)
16762		return true
16763	}
16764	// match: (Move {t1} [n] dst p1 mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Store {t5} (OffPtr <tt5> [o5] p5) d4 (Zero {t6} [n] p6 _))))))
16765	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && t6.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2 + t2.Size() && n >= o3 + t3.Size() && n >= o4 + t4.Size() && n >= o5 + t5.Size()
16766	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [o5] dst) d4 (Zero {t1} [n] dst mem)))))
16767	for {
16768		n := auxIntToInt64(v.AuxInt)
16769		t1 := auxToType(v.Aux)
16770		dst := v_0
16771		p1 := v_1
16772		mem := v_2
16773		if mem.Op != OpStore {
16774			break
16775		}
16776		t2 := auxToType(mem.Aux)
16777		_ = mem.Args[2]
16778		mem_0 := mem.Args[0]
16779		if mem_0.Op != OpOffPtr {
16780			break
16781		}
16782		tt2 := mem_0.Type
16783		o2 := auxIntToInt64(mem_0.AuxInt)
16784		p2 := mem_0.Args[0]
16785		d1 := mem.Args[1]
16786		mem_2 := mem.Args[2]
16787		if mem_2.Op != OpStore {
16788			break
16789		}
16790		t3 := auxToType(mem_2.Aux)
16791		_ = mem_2.Args[2]
16792		mem_2_0 := mem_2.Args[0]
16793		if mem_2_0.Op != OpOffPtr {
16794			break
16795		}
16796		tt3 := mem_2_0.Type
16797		o3 := auxIntToInt64(mem_2_0.AuxInt)
16798		p3 := mem_2_0.Args[0]
16799		d2 := mem_2.Args[1]
16800		mem_2_2 := mem_2.Args[2]
16801		if mem_2_2.Op != OpStore {
16802			break
16803		}
16804		t4 := auxToType(mem_2_2.Aux)
16805		_ = mem_2_2.Args[2]
16806		mem_2_2_0 := mem_2_2.Args[0]
16807		if mem_2_2_0.Op != OpOffPtr {
16808			break
16809		}
16810		tt4 := mem_2_2_0.Type
16811		o4 := auxIntToInt64(mem_2_2_0.AuxInt)
16812		p4 := mem_2_2_0.Args[0]
16813		d3 := mem_2_2.Args[1]
16814		mem_2_2_2 := mem_2_2.Args[2]
16815		if mem_2_2_2.Op != OpStore {
16816			break
16817		}
16818		t5 := auxToType(mem_2_2_2.Aux)
16819		_ = mem_2_2_2.Args[2]
16820		mem_2_2_2_0 := mem_2_2_2.Args[0]
16821		if mem_2_2_2_0.Op != OpOffPtr {
16822			break
16823		}
16824		tt5 := mem_2_2_2_0.Type
16825		o5 := auxIntToInt64(mem_2_2_2_0.AuxInt)
16826		p5 := mem_2_2_2_0.Args[0]
16827		d4 := mem_2_2_2.Args[1]
16828		mem_2_2_2_2 := mem_2_2_2.Args[2]
16829		if mem_2_2_2_2.Op != OpZero || auxIntToInt64(mem_2_2_2_2.AuxInt) != n {
16830			break
16831		}
16832		t6 := auxToType(mem_2_2_2_2.Aux)
16833		p6 := mem_2_2_2_2.Args[0]
16834		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && t6.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2+t2.Size() && n >= o3+t3.Size() && n >= o4+t4.Size() && n >= o5+t5.Size()) {
16835			break
16836		}
16837		v.reset(OpStore)
16838		v.Aux = typeToAux(t2)
16839		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16840		v0.AuxInt = int64ToAuxInt(o2)
16841		v0.AddArg(dst)
16842		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16843		v1.Aux = typeToAux(t3)
16844		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
16845		v2.AuxInt = int64ToAuxInt(o3)
16846		v2.AddArg(dst)
16847		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16848		v3.Aux = typeToAux(t4)
16849		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
16850		v4.AuxInt = int64ToAuxInt(o4)
16851		v4.AddArg(dst)
16852		v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16853		v5.Aux = typeToAux(t5)
16854		v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
16855		v6.AuxInt = int64ToAuxInt(o5)
16856		v6.AddArg(dst)
16857		v7 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
16858		v7.AuxInt = int64ToAuxInt(n)
16859		v7.Aux = typeToAux(t1)
16860		v7.AddArg2(dst, mem)
16861		v5.AddArg3(v6, d4, v7)
16862		v3.AddArg3(v4, d3, v5)
16863		v1.AddArg3(v2, d2, v3)
16864		v.AddArg3(v0, d1, v1)
16865		return true
16866	}
16867	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Zero {t3} [n] p3 _))))
16868	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && n >= o2 + t2.Size()
16869	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Zero {t1} [n] dst mem))
16870	for {
16871		n := auxIntToInt64(v.AuxInt)
16872		t1 := auxToType(v.Aux)
16873		dst := v_0
16874		p1 := v_1
16875		mem := v_2
16876		if mem.Op != OpVarDef {
16877			break
16878		}
16879		mem_0 := mem.Args[0]
16880		if mem_0.Op != OpStore {
16881			break
16882		}
16883		t2 := auxToType(mem_0.Aux)
16884		_ = mem_0.Args[2]
16885		op2 := mem_0.Args[0]
16886		if op2.Op != OpOffPtr {
16887			break
16888		}
16889		tt2 := op2.Type
16890		o2 := auxIntToInt64(op2.AuxInt)
16891		p2 := op2.Args[0]
16892		d1 := mem_0.Args[1]
16893		mem_0_2 := mem_0.Args[2]
16894		if mem_0_2.Op != OpZero || auxIntToInt64(mem_0_2.AuxInt) != n {
16895			break
16896		}
16897		t3 := auxToType(mem_0_2.Aux)
16898		p3 := mem_0_2.Args[0]
16899		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && n >= o2+t2.Size()) {
16900			break
16901		}
16902		v.reset(OpStore)
16903		v.Aux = typeToAux(t2)
16904		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16905		v0.AuxInt = int64ToAuxInt(o2)
16906		v0.AddArg(dst)
16907		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
16908		v1.AuxInt = int64ToAuxInt(n)
16909		v1.Aux = typeToAux(t1)
16910		v1.AddArg2(dst, mem)
16911		v.AddArg3(v0, d1, v1)
16912		return true
16913	}
16914	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Zero {t4} [n] p4 _)))))
16915	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && n >= o2 + t2.Size() && n >= o3 + t3.Size()
16916	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Zero {t1} [n] dst mem)))
16917	for {
16918		n := auxIntToInt64(v.AuxInt)
16919		t1 := auxToType(v.Aux)
16920		dst := v_0
16921		p1 := v_1
16922		mem := v_2
16923		if mem.Op != OpVarDef {
16924			break
16925		}
16926		mem_0 := mem.Args[0]
16927		if mem_0.Op != OpStore {
16928			break
16929		}
16930		t2 := auxToType(mem_0.Aux)
16931		_ = mem_0.Args[2]
16932		mem_0_0 := mem_0.Args[0]
16933		if mem_0_0.Op != OpOffPtr {
16934			break
16935		}
16936		tt2 := mem_0_0.Type
16937		o2 := auxIntToInt64(mem_0_0.AuxInt)
16938		p2 := mem_0_0.Args[0]
16939		d1 := mem_0.Args[1]
16940		mem_0_2 := mem_0.Args[2]
16941		if mem_0_2.Op != OpStore {
16942			break
16943		}
16944		t3 := auxToType(mem_0_2.Aux)
16945		_ = mem_0_2.Args[2]
16946		mem_0_2_0 := mem_0_2.Args[0]
16947		if mem_0_2_0.Op != OpOffPtr {
16948			break
16949		}
16950		tt3 := mem_0_2_0.Type
16951		o3 := auxIntToInt64(mem_0_2_0.AuxInt)
16952		p3 := mem_0_2_0.Args[0]
16953		d2 := mem_0_2.Args[1]
16954		mem_0_2_2 := mem_0_2.Args[2]
16955		if mem_0_2_2.Op != OpZero || auxIntToInt64(mem_0_2_2.AuxInt) != n {
16956			break
16957		}
16958		t4 := auxToType(mem_0_2_2.Aux)
16959		p4 := mem_0_2_2.Args[0]
16960		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && n >= o2+t2.Size() && n >= o3+t3.Size()) {
16961			break
16962		}
16963		v.reset(OpStore)
16964		v.Aux = typeToAux(t2)
16965		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16966		v0.AuxInt = int64ToAuxInt(o2)
16967		v0.AddArg(dst)
16968		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16969		v1.Aux = typeToAux(t3)
16970		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
16971		v2.AuxInt = int64ToAuxInt(o3)
16972		v2.AddArg(dst)
16973		v3 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
16974		v3.AuxInt = int64ToAuxInt(n)
16975		v3.Aux = typeToAux(t1)
16976		v3.AddArg2(dst, mem)
16977		v1.AddArg3(v2, d2, v3)
16978		v.AddArg3(v0, d1, v1)
16979		return true
16980	}
16981	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Zero {t5} [n] p5 _))))))
16982	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2 + t2.Size() && n >= o3 + t3.Size() && n >= o4 + t4.Size()
16983	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Zero {t1} [n] dst mem))))
16984	for {
16985		n := auxIntToInt64(v.AuxInt)
16986		t1 := auxToType(v.Aux)
16987		dst := v_0
16988		p1 := v_1
16989		mem := v_2
16990		if mem.Op != OpVarDef {
16991			break
16992		}
16993		mem_0 := mem.Args[0]
16994		if mem_0.Op != OpStore {
16995			break
16996		}
16997		t2 := auxToType(mem_0.Aux)
16998		_ = mem_0.Args[2]
16999		mem_0_0 := mem_0.Args[0]
17000		if mem_0_0.Op != OpOffPtr {
17001			break
17002		}
17003		tt2 := mem_0_0.Type
17004		o2 := auxIntToInt64(mem_0_0.AuxInt)
17005		p2 := mem_0_0.Args[0]
17006		d1 := mem_0.Args[1]
17007		mem_0_2 := mem_0.Args[2]
17008		if mem_0_2.Op != OpStore {
17009			break
17010		}
17011		t3 := auxToType(mem_0_2.Aux)
17012		_ = mem_0_2.Args[2]
17013		mem_0_2_0 := mem_0_2.Args[0]
17014		if mem_0_2_0.Op != OpOffPtr {
17015			break
17016		}
17017		tt3 := mem_0_2_0.Type
17018		o3 := auxIntToInt64(mem_0_2_0.AuxInt)
17019		p3 := mem_0_2_0.Args[0]
17020		d2 := mem_0_2.Args[1]
17021		mem_0_2_2 := mem_0_2.Args[2]
17022		if mem_0_2_2.Op != OpStore {
17023			break
17024		}
17025		t4 := auxToType(mem_0_2_2.Aux)
17026		_ = mem_0_2_2.Args[2]
17027		mem_0_2_2_0 := mem_0_2_2.Args[0]
17028		if mem_0_2_2_0.Op != OpOffPtr {
17029			break
17030		}
17031		tt4 := mem_0_2_2_0.Type
17032		o4 := auxIntToInt64(mem_0_2_2_0.AuxInt)
17033		p4 := mem_0_2_2_0.Args[0]
17034		d3 := mem_0_2_2.Args[1]
17035		mem_0_2_2_2 := mem_0_2_2.Args[2]
17036		if mem_0_2_2_2.Op != OpZero || auxIntToInt64(mem_0_2_2_2.AuxInt) != n {
17037			break
17038		}
17039		t5 := auxToType(mem_0_2_2_2.Aux)
17040		p5 := mem_0_2_2_2.Args[0]
17041		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2+t2.Size() && n >= o3+t3.Size() && n >= o4+t4.Size()) {
17042			break
17043		}
17044		v.reset(OpStore)
17045		v.Aux = typeToAux(t2)
17046		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
17047		v0.AuxInt = int64ToAuxInt(o2)
17048		v0.AddArg(dst)
17049		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
17050		v1.Aux = typeToAux(t3)
17051		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
17052		v2.AuxInt = int64ToAuxInt(o3)
17053		v2.AddArg(dst)
17054		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
17055		v3.Aux = typeToAux(t4)
17056		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
17057		v4.AuxInt = int64ToAuxInt(o4)
17058		v4.AddArg(dst)
17059		v5 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
17060		v5.AuxInt = int64ToAuxInt(n)
17061		v5.Aux = typeToAux(t1)
17062		v5.AddArg2(dst, mem)
17063		v3.AddArg3(v4, d3, v5)
17064		v1.AddArg3(v2, d2, v3)
17065		v.AddArg3(v0, d1, v1)
17066		return true
17067	}
17068	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Store {t5} (OffPtr <tt5> [o5] p5) d4 (Zero {t6} [n] p6 _)))))))
17069	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && t6.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2 + t2.Size() && n >= o3 + t3.Size() && n >= o4 + t4.Size() && n >= o5 + t5.Size()
17070	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [o5] dst) d4 (Zero {t1} [n] dst mem)))))
17071	for {
17072		n := auxIntToInt64(v.AuxInt)
17073		t1 := auxToType(v.Aux)
17074		dst := v_0
17075		p1 := v_1
17076		mem := v_2
17077		if mem.Op != OpVarDef {
17078			break
17079		}
17080		mem_0 := mem.Args[0]
17081		if mem_0.Op != OpStore {
17082			break
17083		}
17084		t2 := auxToType(mem_0.Aux)
17085		_ = mem_0.Args[2]
17086		mem_0_0 := mem_0.Args[0]
17087		if mem_0_0.Op != OpOffPtr {
17088			break
17089		}
17090		tt2 := mem_0_0.Type
17091		o2 := auxIntToInt64(mem_0_0.AuxInt)
17092		p2 := mem_0_0.Args[0]
17093		d1 := mem_0.Args[1]
17094		mem_0_2 := mem_0.Args[2]
17095		if mem_0_2.Op != OpStore {
17096			break
17097		}
17098		t3 := auxToType(mem_0_2.Aux)
17099		_ = mem_0_2.Args[2]
17100		mem_0_2_0 := mem_0_2.Args[0]
17101		if mem_0_2_0.Op != OpOffPtr {
17102			break
17103		}
17104		tt3 := mem_0_2_0.Type
17105		o3 := auxIntToInt64(mem_0_2_0.AuxInt)
17106		p3 := mem_0_2_0.Args[0]
17107		d2 := mem_0_2.Args[1]
17108		mem_0_2_2 := mem_0_2.Args[2]
17109		if mem_0_2_2.Op != OpStore {
17110			break
17111		}
17112		t4 := auxToType(mem_0_2_2.Aux)
17113		_ = mem_0_2_2.Args[2]
17114		mem_0_2_2_0 := mem_0_2_2.Args[0]
17115		if mem_0_2_2_0.Op != OpOffPtr {
17116			break
17117		}
17118		tt4 := mem_0_2_2_0.Type
17119		o4 := auxIntToInt64(mem_0_2_2_0.AuxInt)
17120		p4 := mem_0_2_2_0.Args[0]
17121		d3 := mem_0_2_2.Args[1]
17122		mem_0_2_2_2 := mem_0_2_2.Args[2]
17123		if mem_0_2_2_2.Op != OpStore {
17124			break
17125		}
17126		t5 := auxToType(mem_0_2_2_2.Aux)
17127		_ = mem_0_2_2_2.Args[2]
17128		mem_0_2_2_2_0 := mem_0_2_2_2.Args[0]
17129		if mem_0_2_2_2_0.Op != OpOffPtr {
17130			break
17131		}
17132		tt5 := mem_0_2_2_2_0.Type
17133		o5 := auxIntToInt64(mem_0_2_2_2_0.AuxInt)
17134		p5 := mem_0_2_2_2_0.Args[0]
17135		d4 := mem_0_2_2_2.Args[1]
17136		mem_0_2_2_2_2 := mem_0_2_2_2.Args[2]
17137		if mem_0_2_2_2_2.Op != OpZero || auxIntToInt64(mem_0_2_2_2_2.AuxInt) != n {
17138			break
17139		}
17140		t6 := auxToType(mem_0_2_2_2_2.Aux)
17141		p6 := mem_0_2_2_2_2.Args[0]
17142		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && t6.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2+t2.Size() && n >= o3+t3.Size() && n >= o4+t4.Size() && n >= o5+t5.Size()) {
17143			break
17144		}
17145		v.reset(OpStore)
17146		v.Aux = typeToAux(t2)
17147		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
17148		v0.AuxInt = int64ToAuxInt(o2)
17149		v0.AddArg(dst)
17150		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
17151		v1.Aux = typeToAux(t3)
17152		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
17153		v2.AuxInt = int64ToAuxInt(o3)
17154		v2.AddArg(dst)
17155		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
17156		v3.Aux = typeToAux(t4)
17157		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
17158		v4.AuxInt = int64ToAuxInt(o4)
17159		v4.AddArg(dst)
17160		v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
17161		v5.Aux = typeToAux(t5)
17162		v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
17163		v6.AuxInt = int64ToAuxInt(o5)
17164		v6.AddArg(dst)
17165		v7 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
17166		v7.AuxInt = int64ToAuxInt(n)
17167		v7.Aux = typeToAux(t1)
17168		v7.AddArg2(dst, mem)
17169		v5.AddArg3(v6, d4, v7)
17170		v3.AddArg3(v4, d3, v5)
17171		v1.AddArg3(v2, d2, v3)
17172		v.AddArg3(v0, d1, v1)
17173		return true
17174	}
17175	// match: (Move {t1} [s] dst tmp1 midmem:(Move {t2} [s] tmp2 src _))
17176	// cond: t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))
17177	// result: (Move {t1} [s] dst src midmem)
17178	for {
17179		s := auxIntToInt64(v.AuxInt)
17180		t1 := auxToType(v.Aux)
17181		dst := v_0
17182		tmp1 := v_1
17183		midmem := v_2
17184		if midmem.Op != OpMove || auxIntToInt64(midmem.AuxInt) != s {
17185			break
17186		}
17187		t2 := auxToType(midmem.Aux)
17188		src := midmem.Args[1]
17189		tmp2 := midmem.Args[0]
17190		if !(t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))) {
17191			break
17192		}
17193		v.reset(OpMove)
17194		v.AuxInt = int64ToAuxInt(s)
17195		v.Aux = typeToAux(t1)
17196		v.AddArg3(dst, src, midmem)
17197		return true
17198	}
17199	// match: (Move {t1} [s] dst tmp1 midmem:(VarDef (Move {t2} [s] tmp2 src _)))
17200	// cond: t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))
17201	// result: (Move {t1} [s] dst src midmem)
17202	for {
17203		s := auxIntToInt64(v.AuxInt)
17204		t1 := auxToType(v.Aux)
17205		dst := v_0
17206		tmp1 := v_1
17207		midmem := v_2
17208		if midmem.Op != OpVarDef {
17209			break
17210		}
17211		midmem_0 := midmem.Args[0]
17212		if midmem_0.Op != OpMove || auxIntToInt64(midmem_0.AuxInt) != s {
17213			break
17214		}
17215		t2 := auxToType(midmem_0.Aux)
17216		src := midmem_0.Args[1]
17217		tmp2 := midmem_0.Args[0]
17218		if !(t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))) {
17219			break
17220		}
17221		v.reset(OpMove)
17222		v.AuxInt = int64ToAuxInt(s)
17223		v.Aux = typeToAux(t1)
17224		v.AddArg3(dst, src, midmem)
17225		return true
17226	}
17227	// match: (Move dst src mem)
17228	// cond: isSamePtr(dst, src)
17229	// result: mem
17230	for {
17231		dst := v_0
17232		src := v_1
17233		mem := v_2
17234		if !(isSamePtr(dst, src)) {
17235			break
17236		}
17237		v.copyOf(mem)
17238		return true
17239	}
17240	return false
17241}
// rewriteValuegeneric_OpMul16 applies the generic rewrite rules for Mul16
// values (constant folding, identity/zero/negation simplification, strength
// reduction of power-of-two multiplies to shifts, and reassociation of
// constant factors). It reports whether v was rewritten in place.
//
// The _i0/_i1 loops try both operand orders for commutative matches by
// swapping v_0/v_1 (resp. v_0_0/v_0_1 etc.) in the loop post-statement; the
// loop always performs an even number of swaps, so the variables are restored
// before the next rule is tried.
func rewriteValuegeneric_OpMul16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul16 (Const16 [c]) (Const16 [d]))
	// result: (Const16 [c*d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpConst16 {
				continue
			}
			d := auxIntToInt16(v_1.AuxInt)
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(c * d)
			return true
		}
		break
	}
	// match: (Mul16 (Const16 [1]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 1 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Mul16 (Const16 [-1]) x)
	// result: (Neg16 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.reset(OpNeg16)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Mul16 <t> n (Const16 [c]))
	// cond: isPowerOfTwo16(c)
	// result: (Lsh16x64 <t> n (Const64 <typ.UInt64> [log16(c)]))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			n := v_0
			if v_1.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_1.AuxInt)
			if !(isPowerOfTwo16(c)) {
				continue
			}
			v.reset(OpLsh16x64)
			v.Type = t
			v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(log16(c))
			v.AddArg2(n, v0)
			return true
		}
		break
	}
	// match: (Mul16 <t> n (Const16 [c]))
	// cond: t.IsSigned() && isPowerOfTwo16(-c)
	// result: (Neg16 (Lsh16x64 <t> n (Const64 <typ.UInt64> [log16(-c)])))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			n := v_0
			if v_1.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_1.AuxInt)
			if !(t.IsSigned() && isPowerOfTwo16(-c)) {
				continue
			}
			v.reset(OpNeg16)
			v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
			v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
			v1.AuxInt = int64ToAuxInt(log16(-c))
			v0.AddArg2(n, v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (Mul16 (Const16 [0]) _)
	// result: (Const16 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(0)
			return true
		}
		break
	}
	// match: (Mul16 (Mul16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Mul16 i (Mul16 <t> x z))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpMul16 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst16 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst16 && x.Op != OpConst16) {
					continue
				}
				v.reset(OpMul16)
				v0 := b.NewValue0(v.Pos, OpMul16, t)
				v0.AddArg2(x, z)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Mul16 (Const16 <t> [c]) (Mul16 (Const16 <t> [d]) x))
	// result: (Mul16 (Const16 <t> [c*d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpMul16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt16(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpMul16)
				v0 := b.NewValue0(v.Pos, OpConst16, t)
				v0.AuxInt = int16ToAuxInt(c * d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpMul32 applies the generic rewrite rules for Mul32
// values: constant folding, x*1 / x*-1 / x*0 simplification, strength
// reduction of (signed-negative and positive) power-of-two multiplies to
// shifts, distribution of a constant over (Add32 (Const32 ...) x), and
// reassociation of constant factors. It reports whether v was rewritten.
//
// The _i0/_i1 loops try both operand orders for commutative matches by
// swapping the operand variables in the loop post-statement; an even number
// of swaps leaves them restored before the next rule runs.
func rewriteValuegeneric_OpMul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul32 (Const32 [c]) (Const32 [d]))
	// result: (Const32 [c*d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpConst32 {
				continue
			}
			d := auxIntToInt32(v_1.AuxInt)
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(c * d)
			return true
		}
		break
	}
	// match: (Mul32 (Const32 [1]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 1 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Mul32 (Const32 [-1]) x)
	// result: (Neg32 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.reset(OpNeg32)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Mul32 <t> n (Const32 [c]))
	// cond: isPowerOfTwo32(c)
	// result: (Lsh32x64 <t> n (Const64 <typ.UInt64> [log32(c)]))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			n := v_0
			if v_1.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_1.AuxInt)
			if !(isPowerOfTwo32(c)) {
				continue
			}
			v.reset(OpLsh32x64)
			v.Type = t
			v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(log32(c))
			v.AddArg2(n, v0)
			return true
		}
		break
	}
	// match: (Mul32 <t> n (Const32 [c]))
	// cond: t.IsSigned() && isPowerOfTwo32(-c)
	// result: (Neg32 (Lsh32x64 <t> n (Const64 <typ.UInt64> [log32(-c)])))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			n := v_0
			if v_1.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_1.AuxInt)
			if !(t.IsSigned() && isPowerOfTwo32(-c)) {
				continue
			}
			v.reset(OpNeg32)
			v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
			v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
			v1.AuxInt = int64ToAuxInt(log32(-c))
			v0.AddArg2(n, v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x))
	// result: (Add32 (Const32 <t> [c*d]) (Mul32 <t> (Const32 <t> [c]) x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpAdd32 || v_1.Type != t {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt32(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAdd32)
				v0 := b.NewValue0(v.Pos, OpConst32, t)
				v0.AuxInt = int32ToAuxInt(c * d)
				v1 := b.NewValue0(v.Pos, OpMul32, t)
				v2 := b.NewValue0(v.Pos, OpConst32, t)
				v2.AuxInt = int32ToAuxInt(c)
				v1.AddArg2(v2, x)
				v.AddArg2(v0, v1)
				return true
			}
		}
		break
	}
	// match: (Mul32 (Const32 [0]) _)
	// result: (Const32 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(0)
			return true
		}
		break
	}
	// match: (Mul32 (Mul32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Mul32 i (Mul32 <t> x z))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpMul32 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst32 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst32 && x.Op != OpConst32) {
					continue
				}
				v.reset(OpMul32)
				v0 := b.NewValue0(v.Pos, OpMul32, t)
				v0.AddArg2(x, z)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Mul32 (Const32 <t> [c]) (Mul32 (Const32 <t> [d]) x))
	// result: (Mul32 (Const32 <t> [c*d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpMul32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt32(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpMul32)
				v0 := b.NewValue0(v.Pos, OpConst32, t)
				v0.AuxInt = int32ToAuxInt(c * d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpMul32F applies the generic rewrite rules for Mul32F
// (32-bit float multiply) values: constant folding guarded against NaN
// results, x*1 => x, x*-1 => Neg32F x, and x*2 => x+x. It reports whether v
// was rewritten in place.
func rewriteValuegeneric_OpMul32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mul32F (Const32F [c]) (Const32F [d]))
	// cond: c*d == c*d
	// result: (Const32F [c*d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32F {
				continue
			}
			c := auxIntToFloat32(v_0.AuxInt)
			if v_1.Op != OpConst32F {
				continue
			}
			d := auxIntToFloat32(v_1.AuxInt)
			// c*d == c*d is false exactly when c*d is NaN (NaN != NaN),
			// so folding never produces a NaN constant.
			if !(c*d == c*d) {
				continue
			}
			v.reset(OpConst32F)
			v.AuxInt = float32ToAuxInt(c * d)
			return true
		}
		break
	}
	// match: (Mul32F x (Const32F [1]))
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpConst32F || auxIntToFloat32(v_1.AuxInt) != 1 {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Mul32F x (Const32F [-1]))
	// result: (Neg32F x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpConst32F || auxIntToFloat32(v_1.AuxInt) != -1 {
				continue
			}
			v.reset(OpNeg32F)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Mul32F x (Const32F [2]))
	// result: (Add32F x x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpConst32F || auxIntToFloat32(v_1.AuxInt) != 2 {
				continue
			}
			v.reset(OpAdd32F)
			v.AddArg2(x, x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpMul64 applies the generic rewrite rules for Mul64
// values: constant folding, x*1 / x*-1 / x*0 simplification, strength
// reduction of power-of-two multiplies to shifts, distribution of a constant
// over (Add64 (Const64 ...) x), and reassociation of constant factors. It
// reports whether v was rewritten in place.
//
// The _i0/_i1 loops try both operand orders for commutative matches by
// swapping the operand variables in the loop post-statement; an even number
// of swaps leaves them restored before the next rule runs.
func rewriteValuegeneric_OpMul64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul64 (Const64 [c]) (Const64 [d]))
	// result: (Const64 [c*d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(c * d)
			return true
		}
		break
	}
	// match: (Mul64 (Const64 [1]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 1 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Mul64 (Const64 [-1]) x)
	// result: (Neg64 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.reset(OpNeg64)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Mul64 <t> n (Const64 [c]))
	// cond: isPowerOfTwo64(c)
	// result: (Lsh64x64 <t> n (Const64 <typ.UInt64> [log64(c)]))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			n := v_0
			if v_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c)) {
				continue
			}
			v.reset(OpLsh64x64)
			v.Type = t
			v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(log64(c))
			v.AddArg2(n, v0)
			return true
		}
		break
	}
	// match: (Mul64 <t> n (Const64 [c]))
	// cond: t.IsSigned() && isPowerOfTwo64(-c)
	// result: (Neg64 (Lsh64x64 <t> n (Const64 <typ.UInt64> [log64(-c)])))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			n := v_0
			if v_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(t.IsSigned() && isPowerOfTwo64(-c)) {
				continue
			}
			v.reset(OpNeg64)
			v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
			v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
			v1.AuxInt = int64ToAuxInt(log64(-c))
			v0.AddArg2(n, v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x))
	// result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpAdd64 || v_1.Type != t {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt64(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAdd64)
				v0 := b.NewValue0(v.Pos, OpConst64, t)
				v0.AuxInt = int64ToAuxInt(c * d)
				v1 := b.NewValue0(v.Pos, OpMul64, t)
				v2 := b.NewValue0(v.Pos, OpConst64, t)
				v2.AuxInt = int64ToAuxInt(c)
				v1.AddArg2(v2, x)
				v.AddArg2(v0, v1)
				return true
			}
		}
		break
	}
	// match: (Mul64 (Const64 [0]) _)
	// result: (Const64 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (Mul64 (Mul64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Mul64 i (Mul64 <t> x z))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpMul64 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst64 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst64 && x.Op != OpConst64) {
					continue
				}
				v.reset(OpMul64)
				v0 := b.NewValue0(v.Pos, OpMul64, t)
				v0.AddArg2(x, z)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Mul64 (Const64 <t> [c]) (Mul64 (Const64 <t> [d]) x))
	// result: (Mul64 (Const64 <t> [c*d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpMul64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt64(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpMul64)
				v0 := b.NewValue0(v.Pos, OpConst64, t)
				v0.AuxInt = int64ToAuxInt(c * d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpMul64F applies the generic rewrite rules for Mul64F
// (64-bit float multiply) values: constant folding guarded against NaN
// results, x*1 => x, x*-1 => Neg64F x, and x*2 => x+x. It reports whether v
// was rewritten in place.
func rewriteValuegeneric_OpMul64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mul64F (Const64F [c]) (Const64F [d]))
	// cond: c*d == c*d
	// result: (Const64F [c*d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64F {
				continue
			}
			c := auxIntToFloat64(v_0.AuxInt)
			if v_1.Op != OpConst64F {
				continue
			}
			d := auxIntToFloat64(v_1.AuxInt)
			// c*d == c*d is false exactly when c*d is NaN (NaN != NaN),
			// so folding never produces a NaN constant.
			if !(c*d == c*d) {
				continue
			}
			v.reset(OpConst64F)
			v.AuxInt = float64ToAuxInt(c * d)
			return true
		}
		break
	}
	// match: (Mul64F x (Const64F [1]))
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpConst64F || auxIntToFloat64(v_1.AuxInt) != 1 {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Mul64F x (Const64F [-1]))
	// result: (Neg64F x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpConst64F || auxIntToFloat64(v_1.AuxInt) != -1 {
				continue
			}
			v.reset(OpNeg64F)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Mul64F x (Const64F [2]))
	// result: (Add64F x x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpConst64F || auxIntToFloat64(v_1.AuxInt) != 2 {
				continue
			}
			v.reset(OpAdd64F)
			v.AddArg2(x, x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpMul8 applies the generic rewrite rules for Mul8
// values (constant folding, identity/zero/negation simplification, strength
// reduction of power-of-two multiplies to shifts, and reassociation of
// constant factors). It reports whether v was rewritten in place.
//
// The _i0/_i1 loops try both operand orders for commutative matches by
// swapping the operand variables in the loop post-statement; an even number
// of swaps leaves them restored before the next rule runs.
func rewriteValuegeneric_OpMul8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul8 (Const8 [c]) (Const8 [d]))
	// result: (Const8 [c*d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpConst8 {
				continue
			}
			d := auxIntToInt8(v_1.AuxInt)
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(c * d)
			return true
		}
		break
	}
	// match: (Mul8 (Const8 [1]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 1 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Mul8 (Const8 [-1]) x)
	// result: (Neg8 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.reset(OpNeg8)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Mul8 <t> n (Const8 [c]))
	// cond: isPowerOfTwo8(c)
	// result: (Lsh8x64 <t> n (Const64 <typ.UInt64> [log8(c)]))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			n := v_0
			if v_1.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_1.AuxInt)
			if !(isPowerOfTwo8(c)) {
				continue
			}
			v.reset(OpLsh8x64)
			v.Type = t
			v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(log8(c))
			v.AddArg2(n, v0)
			return true
		}
		break
	}
	// match: (Mul8 <t> n (Const8 [c]))
	// cond: t.IsSigned() && isPowerOfTwo8(-c)
	// result: (Neg8 (Lsh8x64 <t> n (Const64 <typ.UInt64> [log8(-c)])))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			n := v_0
			if v_1.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_1.AuxInt)
			if !(t.IsSigned() && isPowerOfTwo8(-c)) {
				continue
			}
			v.reset(OpNeg8)
			v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
			v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
			v1.AuxInt = int64ToAuxInt(log8(-c))
			v0.AddArg2(n, v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (Mul8 (Const8 [0]) _)
	// result: (Const8 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(0)
			return true
		}
		break
	}
	// match: (Mul8 (Mul8 i:(Const8 <t>) z) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Mul8 i (Mul8 <t> x z))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpMul8 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst8 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst8 && x.Op != OpConst8) {
					continue
				}
				v.reset(OpMul8)
				v0 := b.NewValue0(v.Pos, OpMul8, t)
				v0.AddArg2(x, z)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Mul8 (Const8 <t> [c]) (Mul8 (Const8 <t> [d]) x))
	// result: (Mul8 (Const8 <t> [c*d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpMul8 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt8(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpMul8)
				v0 := b.NewValue0(v.Pos, OpConst8, t)
				v0.AuxInt = int8ToAuxInt(c * d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpNeg16 applies the generic rewrite rules for Neg16
// values: constant folding, -(x-y) => y-x, double-negation elimination, and
// -(^x) => x+1 (two's-complement identity). It reports whether v was
// rewritten in place.
func rewriteValuegeneric_OpNeg16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neg16 (Const16 [c]))
	// result: (Const16 [-c])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(-c)
		return true
	}
	// match: (Neg16 (Sub16 x y))
	// result: (Sub16 y x)
	for {
		if v_0.Op != OpSub16 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpSub16)
		v.AddArg2(y, x)
		return true
	}
	// match: (Neg16 (Neg16 x))
	// result: x
	for {
		if v_0.Op != OpNeg16 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Neg16 <t> (Com16 x))
	// result: (Add16 (Const16 <t> [1]) x)
	for {
		t := v.Type
		if v_0.Op != OpCom16 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(1)
		v.AddArg2(v0, x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpNeg32 applies the generic rewrite rules for Neg32
// values: constant folding, -(x-y) => y-x, double-negation elimination, and
// -(^x) => x+1 (two's-complement identity). It reports whether v was
// rewritten in place.
func rewriteValuegeneric_OpNeg32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neg32 (Const32 [c]))
	// result: (Const32 [-c])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(-c)
		return true
	}
	// match: (Neg32 (Sub32 x y))
	// result: (Sub32 y x)
	for {
		if v_0.Op != OpSub32 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpSub32)
		v.AddArg2(y, x)
		return true
	}
	// match: (Neg32 (Neg32 x))
	// result: x
	for {
		if v_0.Op != OpNeg32 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Neg32 <t> (Com32 x))
	// result: (Add32 (Const32 <t> [1]) x)
	for {
		t := v.Type
		if v_0.Op != OpCom32 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpAdd32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(1)
		v.AddArg2(v0, x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpNeg32F folds negation of a non-zero 32-bit float
// constant. It reports whether v was rewritten in place.
//
// NOTE(review): the c != 0 guard skips zero constants — presumably to avoid
// signed-zero issues when folding -(0.0) at compile time; confirm against
// the rule's comment in _gen/generic.rules.
func rewriteValuegeneric_OpNeg32F(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Neg32F (Const32F [c]))
	// cond: c != 0
	// result: (Const32F [-c])
	for {
		if v_0.Op != OpConst32F {
			break
		}
		c := auxIntToFloat32(v_0.AuxInt)
		if !(c != 0) {
			break
		}
		v.reset(OpConst32F)
		v.AuxInt = float32ToAuxInt(-c)
		return true
	}
	return false
}
// rewriteValuegeneric_OpNeg64 applies the generic rewrite rules for Neg64
// values: constant folding, -(x-y) => y-x, double-negation elimination, and
// -(^x) => x+1 (two's-complement identity). It reports whether v was
// rewritten in place.
func rewriteValuegeneric_OpNeg64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neg64 (Const64 [c]))
	// result: (Const64 [-c])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(-c)
		return true
	}
	// match: (Neg64 (Sub64 x y))
	// result: (Sub64 y x)
	for {
		if v_0.Op != OpSub64 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpSub64)
		v.AddArg2(y, x)
		return true
	}
	// match: (Neg64 (Neg64 x))
	// result: x
	for {
		if v_0.Op != OpNeg64 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Neg64 <t> (Com64 x))
	// result: (Add64 (Const64 <t> [1]) x)
	for {
		t := v.Type
		if v_0.Op != OpCom64 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(1)
		v.AddArg2(v0, x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpNeg64F folds negation of a non-zero 64-bit float
// constant. It reports whether v was rewritten in place.
//
// NOTE(review): the c != 0 guard skips zero constants — presumably to avoid
// signed-zero issues when folding -(0.0) at compile time; confirm against
// the rule's comment in _gen/generic.rules.
func rewriteValuegeneric_OpNeg64F(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Neg64F (Const64F [c]))
	// cond: c != 0
	// result: (Const64F [-c])
	for {
		if v_0.Op != OpConst64F {
			break
		}
		c := auxIntToFloat64(v_0.AuxInt)
		if !(c != 0) {
			break
		}
		v.reset(OpConst64F)
		v.AuxInt = float64ToAuxInt(-c)
		return true
	}
	return false
}
// rewriteValuegeneric_OpNeg8 applies the generic rewrite rules for Neg8
// values: constant folding, -(x-y) => y-x, double-negation elimination, and
// -(^x) => x+1 (two's-complement identity). It reports whether v was
// rewritten in place.
func rewriteValuegeneric_OpNeg8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neg8 (Const8 [c]))
	// result: (Const8 [-c])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(-c)
		return true
	}
	// match: (Neg8 (Sub8 x y))
	// result: (Sub8 y x)
	for {
		if v_0.Op != OpSub8 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpSub8)
		v.AddArg2(y, x)
		return true
	}
	// match: (Neg8 (Neg8 x))
	// result: x
	for {
		if v_0.Op != OpNeg8 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Neg8 <t> (Com8 x))
	// result: (Add8 (Const8 <t> [1]) x)
	for {
		t := v.Type
		if v_0.Op != OpCom8 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpAdd8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(1)
		v.AddArg2(v0, x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpNeq16 applies the machine-independent rewrite rules
// for Neq16 to v and reports whether v was rewritten in place.
// The // match, // cond, and // result comments above each rule are the rule
// text this code was generated from; the _i0/_i1 swap loops retry the match
// with the operands exchanged because the matched ops are commutative (after
// a full non-matching pass the swaps cancel, restoring v_0/v_1).
func rewriteValuegeneric_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x x)
	// result: (ConstBool [false])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
	// result: (Neq16 (Const16 <t> [c-d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpAdd16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt16(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpNeq16)
				v0 := b.NewValue0(v.Pos, OpConst16, t)
				v0.AuxInt = int16ToAuxInt(c - d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Neq16 (Const16 [c]) (Const16 [d]))
	// result: (ConstBool [c != d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpConst16 {
				continue
			}
			d := auxIntToInt16(v_1.AuxInt)
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(c != d)
			return true
		}
		break
	}
	// match: (Neq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
	// cond: k > 0 && k < 15 && kbar == 16 - k
	// result: (Neq16 (And16 <t> n (Const16 <t> [1<<uint(k)-1])) (Const16 <t> [0]))
	// NOTE(review): the Lsh/Rsh/Add tree matched here appears to be the
	// sign-corrected expansion of a signed "n % (1<<k)" — confirm against
	// _gen/generic.rules before relying on that reading.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			n := v_0
			if v_1.Op != OpLsh16x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpRsh16x64 {
				continue
			}
			_ = v_1_0.Args[1]
			v_1_0_0 := v_1_0.Args[0]
			if v_1_0_0.Op != OpAdd16 {
				continue
			}
			t := v_1_0_0.Type
			_ = v_1_0_0.Args[1]
			v_1_0_0_0 := v_1_0_0.Args[0]
			v_1_0_0_1 := v_1_0_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh16Ux64 || v_1_0_0_1.Type != t {
					continue
				}
				_ = v_1_0_0_1.Args[1]
				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
				if v_1_0_0_1_0.Op != OpRsh16x64 || v_1_0_0_1_0.Type != t {
					continue
				}
				_ = v_1_0_0_1_0.Args[1]
				if n != v_1_0_0_1_0.Args[0] {
					continue
				}
				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 15 {
					continue
				}
				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
					continue
				}
				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
				v_1_0_1 := v_1_0.Args[1]
				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
					continue
				}
				k := auxIntToInt64(v_1_0_1.AuxInt)
				v_1_1 := v_1.Args[1]
				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 15 && kbar == 16-k) {
					continue
				}
				v.reset(OpNeq16)
				v0 := b.NewValue0(v.Pos, OpAnd16, t)
				v1 := b.NewValue0(v.Pos, OpConst16, t)
				v1.AuxInt = int16ToAuxInt(1<<uint(k) - 1)
				v0.AddArg2(n, v1)
				v2 := b.NewValue0(v.Pos, OpConst16, t)
				v2.AuxInt = int16ToAuxInt(0)
				v.AddArg2(v0, v2)
				return true
			}
		}
		break
	}
	// match: (Neq16 s:(Sub16 x y) (Const16 [0]))
	// cond: s.Uses == 1
	// result: (Neq16 x y)
	// Fold (x-y) != 0 into x != y, but only when the Sub16 has no other
	// users (s.Uses == 1), so the subtraction is not kept alive elsewhere.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			s := v_0
			if s.Op != OpSub16 {
				continue
			}
			y := s.Args[1]
			x := s.Args[0]
			if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 || !(s.Uses == 1) {
				continue
			}
			v.reset(OpNeq16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Neq16 (And16 <t> x (Const16 <t> [y])) (Const16 <t> [y]))
	// cond: oneBit16(y)
	// result: (Eq16 (And16 <t> x (Const16 <t> [y])) (Const16 <t> [0]))
	// For a single-bit mask y, (x&y) != y is equivalent to (x&y) == 0 inverted,
	// so flip the comparison to compare against zero instead.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd16 {
				continue
			}
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				if v_0_1.Op != OpConst16 || v_0_1.Type != t {
					continue
				}
				y := auxIntToInt16(v_0_1.AuxInt)
				if v_1.Op != OpConst16 || v_1.Type != t || auxIntToInt16(v_1.AuxInt) != y || !(oneBit16(y)) {
					continue
				}
				v.reset(OpEq16)
				v0 := b.NewValue0(v.Pos, OpAnd16, t)
				v1 := b.NewValue0(v.Pos, OpConst16, t)
				v1.AuxInt = int16ToAuxInt(y)
				v0.AddArg2(x, v1)
				v2 := b.NewValue0(v.Pos, OpConst16, t)
				v2.AuxInt = int16ToAuxInt(0)
				v.AddArg2(v0, v2)
				return true
			}
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpNeq32 applies the machine-independent rewrite rules
// for Neq32 to v and reports whether v was rewritten in place.
// The // match, // cond, and // result comments above each rule are the rule
// text this code was generated from; the _i0/_i1 swap loops retry the match
// with the operands exchanged because the matched ops are commutative (after
// a full non-matching pass the swaps cancel, restoring v_0/v_1).
func rewriteValuegeneric_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq32 x x)
	// result: (ConstBool [false])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Neq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
	// result: (Neq32 (Const32 <t> [c-d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpAdd32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt32(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpNeq32)
				v0 := b.NewValue0(v.Pos, OpConst32, t)
				v0.AuxInt = int32ToAuxInt(c - d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Neq32 (Const32 [c]) (Const32 [d]))
	// result: (ConstBool [c != d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpConst32 {
				continue
			}
			d := auxIntToInt32(v_1.AuxInt)
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(c != d)
			return true
		}
		break
	}
	// match: (Neq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
	// cond: k > 0 && k < 31 && kbar == 32 - k
	// result: (Neq32 (And32 <t> n (Const32 <t> [1<<uint(k)-1])) (Const32 <t> [0]))
	// NOTE(review): the Lsh/Rsh/Add tree matched here appears to be the
	// sign-corrected expansion of a signed "n % (1<<k)" — confirm against
	// _gen/generic.rules before relying on that reading.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			n := v_0
			if v_1.Op != OpLsh32x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpRsh32x64 {
				continue
			}
			_ = v_1_0.Args[1]
			v_1_0_0 := v_1_0.Args[0]
			if v_1_0_0.Op != OpAdd32 {
				continue
			}
			t := v_1_0_0.Type
			_ = v_1_0_0.Args[1]
			v_1_0_0_0 := v_1_0_0.Args[0]
			v_1_0_0_1 := v_1_0_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh32Ux64 || v_1_0_0_1.Type != t {
					continue
				}
				_ = v_1_0_0_1.Args[1]
				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
				if v_1_0_0_1_0.Op != OpRsh32x64 || v_1_0_0_1_0.Type != t {
					continue
				}
				_ = v_1_0_0_1_0.Args[1]
				if n != v_1_0_0_1_0.Args[0] {
					continue
				}
				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 31 {
					continue
				}
				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
					continue
				}
				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
				v_1_0_1 := v_1_0.Args[1]
				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
					continue
				}
				k := auxIntToInt64(v_1_0_1.AuxInt)
				v_1_1 := v_1.Args[1]
				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 31 && kbar == 32-k) {
					continue
				}
				v.reset(OpNeq32)
				v0 := b.NewValue0(v.Pos, OpAnd32, t)
				v1 := b.NewValue0(v.Pos, OpConst32, t)
				v1.AuxInt = int32ToAuxInt(1<<uint(k) - 1)
				v0.AddArg2(n, v1)
				v2 := b.NewValue0(v.Pos, OpConst32, t)
				v2.AuxInt = int32ToAuxInt(0)
				v.AddArg2(v0, v2)
				return true
			}
		}
		break
	}
	// match: (Neq32 s:(Sub32 x y) (Const32 [0]))
	// cond: s.Uses == 1
	// result: (Neq32 x y)
	// Fold (x-y) != 0 into x != y, but only when the Sub32 has no other
	// users (s.Uses == 1), so the subtraction is not kept alive elsewhere.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			s := v_0
			if s.Op != OpSub32 {
				continue
			}
			y := s.Args[1]
			x := s.Args[0]
			if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != 0 || !(s.Uses == 1) {
				continue
			}
			v.reset(OpNeq32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Neq32 (And32 <t> x (Const32 <t> [y])) (Const32 <t> [y]))
	// cond: oneBit32(y)
	// result: (Eq32 (And32 <t> x (Const32 <t> [y])) (Const32 <t> [0]))
	// For a single-bit mask y, (x&y) != y is equivalent to (x&y) == 0 inverted,
	// so flip the comparison to compare against zero instead.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd32 {
				continue
			}
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				if v_0_1.Op != OpConst32 || v_0_1.Type != t {
					continue
				}
				y := auxIntToInt32(v_0_1.AuxInt)
				if v_1.Op != OpConst32 || v_1.Type != t || auxIntToInt32(v_1.AuxInt) != y || !(oneBit32(y)) {
					continue
				}
				v.reset(OpEq32)
				v0 := b.NewValue0(v.Pos, OpAnd32, t)
				v1 := b.NewValue0(v.Pos, OpConst32, t)
				v1.AuxInt = int32ToAuxInt(y)
				v0.AddArg2(x, v1)
				v2 := b.NewValue0(v.Pos, OpConst32, t)
				v2.AuxInt = int32ToAuxInt(0)
				v.AddArg2(v0, v2)
				return true
			}
		}
		break
	}
	return false
}
18758func rewriteValuegeneric_OpNeq32F(v *Value) bool {
18759	v_1 := v.Args[1]
18760	v_0 := v.Args[0]
18761	// match: (Neq32F (Const32F [c]) (Const32F [d]))
18762	// result: (ConstBool [c != d])
18763	for {
18764		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18765			if v_0.Op != OpConst32F {
18766				continue
18767			}
18768			c := auxIntToFloat32(v_0.AuxInt)
18769			if v_1.Op != OpConst32F {
18770				continue
18771			}
18772			d := auxIntToFloat32(v_1.AuxInt)
18773			v.reset(OpConstBool)
18774			v.AuxInt = boolToAuxInt(c != d)
18775			return true
18776		}
18777		break
18778	}
18779	return false
18780}
// rewriteValuegeneric_OpNeq64 applies the machine-independent rewrite rules
// for Neq64 to v and reports whether v was rewritten in place.
// The // match, // cond, and // result comments above each rule are the rule
// text this code was generated from; the _i0/_i1 swap loops retry the match
// with the operands exchanged because the matched ops are commutative (after
// a full non-matching pass the swaps cancel, restoring v_0/v_1).
func rewriteValuegeneric_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq64 x x)
	// result: (ConstBool [false])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Neq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
	// result: (Neq64 (Const64 <t> [c-d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpAdd64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt64(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpNeq64)
				v0 := b.NewValue0(v.Pos, OpConst64, t)
				v0.AuxInt = int64ToAuxInt(c - d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Neq64 (Const64 [c]) (Const64 [d]))
	// result: (ConstBool [c != d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(c != d)
			return true
		}
		break
	}
	// match: (Neq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
	// cond: k > 0 && k < 63 && kbar == 64 - k
	// result: (Neq64 (And64 <t> n (Const64 <t> [1<<uint(k)-1])) (Const64 <t> [0]))
	// NOTE(review): the Lsh/Rsh/Add tree matched here appears to be the
	// sign-corrected expansion of a signed "n % (1<<k)" — confirm against
	// _gen/generic.rules before relying on that reading.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			n := v_0
			if v_1.Op != OpLsh64x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpRsh64x64 {
				continue
			}
			_ = v_1_0.Args[1]
			v_1_0_0 := v_1_0.Args[0]
			if v_1_0_0.Op != OpAdd64 {
				continue
			}
			t := v_1_0_0.Type
			_ = v_1_0_0.Args[1]
			v_1_0_0_0 := v_1_0_0.Args[0]
			v_1_0_0_1 := v_1_0_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh64Ux64 || v_1_0_0_1.Type != t {
					continue
				}
				_ = v_1_0_0_1.Args[1]
				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
				if v_1_0_0_1_0.Op != OpRsh64x64 || v_1_0_0_1_0.Type != t {
					continue
				}
				_ = v_1_0_0_1_0.Args[1]
				if n != v_1_0_0_1_0.Args[0] {
					continue
				}
				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 63 {
					continue
				}
				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
					continue
				}
				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
				v_1_0_1 := v_1_0.Args[1]
				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
					continue
				}
				k := auxIntToInt64(v_1_0_1.AuxInt)
				v_1_1 := v_1.Args[1]
				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 63 && kbar == 64-k) {
					continue
				}
				v.reset(OpNeq64)
				v0 := b.NewValue0(v.Pos, OpAnd64, t)
				v1 := b.NewValue0(v.Pos, OpConst64, t)
				v1.AuxInt = int64ToAuxInt(1<<uint(k) - 1)
				v0.AddArg2(n, v1)
				v2 := b.NewValue0(v.Pos, OpConst64, t)
				v2.AuxInt = int64ToAuxInt(0)
				v.AddArg2(v0, v2)
				return true
			}
		}
		break
	}
	// match: (Neq64 s:(Sub64 x y) (Const64 [0]))
	// cond: s.Uses == 1
	// result: (Neq64 x y)
	// Fold (x-y) != 0 into x != y, but only when the Sub64 has no other
	// users (s.Uses == 1), so the subtraction is not kept alive elsewhere.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			s := v_0
			if s.Op != OpSub64 {
				continue
			}
			y := s.Args[1]
			x := s.Args[0]
			if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 || !(s.Uses == 1) {
				continue
			}
			v.reset(OpNeq64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Neq64 (And64 <t> x (Const64 <t> [y])) (Const64 <t> [y]))
	// cond: oneBit64(y)
	// result: (Eq64 (And64 <t> x (Const64 <t> [y])) (Const64 <t> [0]))
	// For a single-bit mask y, (x&y) != y is equivalent to (x&y) == 0 inverted,
	// so flip the comparison to compare against zero instead.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd64 {
				continue
			}
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				if v_0_1.Op != OpConst64 || v_0_1.Type != t {
					continue
				}
				y := auxIntToInt64(v_0_1.AuxInt)
				if v_1.Op != OpConst64 || v_1.Type != t || auxIntToInt64(v_1.AuxInt) != y || !(oneBit64(y)) {
					continue
				}
				v.reset(OpEq64)
				v0 := b.NewValue0(v.Pos, OpAnd64, t)
				v1 := b.NewValue0(v.Pos, OpConst64, t)
				v1.AuxInt = int64ToAuxInt(y)
				v0.AddArg2(x, v1)
				v2 := b.NewValue0(v.Pos, OpConst64, t)
				v2.AuxInt = int64ToAuxInt(0)
				v.AddArg2(v0, v2)
				return true
			}
		}
		break
	}
	return false
}
18968func rewriteValuegeneric_OpNeq64F(v *Value) bool {
18969	v_1 := v.Args[1]
18970	v_0 := v.Args[0]
18971	// match: (Neq64F (Const64F [c]) (Const64F [d]))
18972	// result: (ConstBool [c != d])
18973	for {
18974		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18975			if v_0.Op != OpConst64F {
18976				continue
18977			}
18978			c := auxIntToFloat64(v_0.AuxInt)
18979			if v_1.Op != OpConst64F {
18980				continue
18981			}
18982			d := auxIntToFloat64(v_1.AuxInt)
18983			v.reset(OpConstBool)
18984			v.AuxInt = boolToAuxInt(c != d)
18985			return true
18986		}
18987		break
18988	}
18989	return false
18990}
// rewriteValuegeneric_OpNeq8 applies the machine-independent rewrite rules
// for Neq8 to v and reports whether v was rewritten in place.
// The // match, // cond, and // result comments above each rule are the rule
// text this code was generated from; the _i0/_i1 swap loops retry the match
// with the operands exchanged because the matched ops are commutative (after
// a full non-matching pass the swaps cancel, restoring v_0/v_1).
func rewriteValuegeneric_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x x)
	// result: (ConstBool [false])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = boolToAuxInt(false)
		return true
	}
	// match: (Neq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
	// result: (Neq8 (Const8 <t> [c-d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpAdd8 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt8(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpNeq8)
				v0 := b.NewValue0(v.Pos, OpConst8, t)
				v0.AuxInt = int8ToAuxInt(c - d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Neq8 (Const8 [c]) (Const8 [d]))
	// result: (ConstBool [c != d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpConst8 {
				continue
			}
			d := auxIntToInt8(v_1.AuxInt)
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(c != d)
			return true
		}
		break
	}
	// match: (Neq8 n (Lsh8x64 (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
	// cond: k > 0 && k < 7 && kbar == 8 - k
	// result: (Neq8 (And8 <t> n (Const8 <t> [1<<uint(k)-1])) (Const8 <t> [0]))
	// NOTE(review): the Lsh/Rsh/Add tree matched here appears to be the
	// sign-corrected expansion of a signed "n % (1<<k)" — confirm against
	// _gen/generic.rules before relying on that reading.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			n := v_0
			if v_1.Op != OpLsh8x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpRsh8x64 {
				continue
			}
			_ = v_1_0.Args[1]
			v_1_0_0 := v_1_0.Args[0]
			if v_1_0_0.Op != OpAdd8 {
				continue
			}
			t := v_1_0_0.Type
			_ = v_1_0_0.Args[1]
			v_1_0_0_0 := v_1_0_0.Args[0]
			v_1_0_0_1 := v_1_0_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh8Ux64 || v_1_0_0_1.Type != t {
					continue
				}
				_ = v_1_0_0_1.Args[1]
				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
				if v_1_0_0_1_0.Op != OpRsh8x64 || v_1_0_0_1_0.Type != t {
					continue
				}
				_ = v_1_0_0_1_0.Args[1]
				if n != v_1_0_0_1_0.Args[0] {
					continue
				}
				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 7 {
					continue
				}
				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
					continue
				}
				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
				v_1_0_1 := v_1_0.Args[1]
				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
					continue
				}
				k := auxIntToInt64(v_1_0_1.AuxInt)
				v_1_1 := v_1.Args[1]
				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 7 && kbar == 8-k) {
					continue
				}
				v.reset(OpNeq8)
				v0 := b.NewValue0(v.Pos, OpAnd8, t)
				v1 := b.NewValue0(v.Pos, OpConst8, t)
				v1.AuxInt = int8ToAuxInt(1<<uint(k) - 1)
				v0.AddArg2(n, v1)
				v2 := b.NewValue0(v.Pos, OpConst8, t)
				v2.AuxInt = int8ToAuxInt(0)
				v.AddArg2(v0, v2)
				return true
			}
		}
		break
	}
	// match: (Neq8 s:(Sub8 x y) (Const8 [0]))
	// cond: s.Uses == 1
	// result: (Neq8 x y)
	// Fold (x-y) != 0 into x != y, but only when the Sub8 has no other
	// users (s.Uses == 1), so the subtraction is not kept alive elsewhere.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			s := v_0
			if s.Op != OpSub8 {
				continue
			}
			y := s.Args[1]
			x := s.Args[0]
			if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 || !(s.Uses == 1) {
				continue
			}
			v.reset(OpNeq8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Neq8 (And8 <t> x (Const8 <t> [y])) (Const8 <t> [y]))
	// cond: oneBit8(y)
	// result: (Eq8 (And8 <t> x (Const8 <t> [y])) (Const8 <t> [0]))
	// For a single-bit mask y, (x&y) != y is equivalent to (x&y) == 0 inverted,
	// so flip the comparison to compare against zero instead.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd8 {
				continue
			}
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				if v_0_1.Op != OpConst8 || v_0_1.Type != t {
					continue
				}
				y := auxIntToInt8(v_0_1.AuxInt)
				if v_1.Op != OpConst8 || v_1.Type != t || auxIntToInt8(v_1.AuxInt) != y || !(oneBit8(y)) {
					continue
				}
				v.reset(OpEq8)
				v0 := b.NewValue0(v.Pos, OpAnd8, t)
				v1 := b.NewValue0(v.Pos, OpConst8, t)
				v1.AuxInt = int8ToAuxInt(y)
				v0.AddArg2(x, v1)
				v2 := b.NewValue0(v.Pos, OpConst8, t)
				v2.AuxInt = int8ToAuxInt(0)
				v.AddArg2(v0, v2)
				return true
			}
		}
		break
	}
	return false
}
19178func rewriteValuegeneric_OpNeqB(v *Value) bool {
19179	v_1 := v.Args[1]
19180	v_0 := v.Args[0]
19181	// match: (NeqB (ConstBool [c]) (ConstBool [d]))
19182	// result: (ConstBool [c != d])
19183	for {
19184		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19185			if v_0.Op != OpConstBool {
19186				continue
19187			}
19188			c := auxIntToBool(v_0.AuxInt)
19189			if v_1.Op != OpConstBool {
19190				continue
19191			}
19192			d := auxIntToBool(v_1.AuxInt)
19193			v.reset(OpConstBool)
19194			v.AuxInt = boolToAuxInt(c != d)
19195			return true
19196		}
19197		break
19198	}
19199	// match: (NeqB (ConstBool [false]) x)
19200	// result: x
19201	for {
19202		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19203			if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != false {
19204				continue
19205			}
19206			x := v_1
19207			v.copyOf(x)
19208			return true
19209		}
19210		break
19211	}
19212	// match: (NeqB (ConstBool [true]) x)
19213	// result: (Not x)
19214	for {
19215		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19216			if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != true {
19217				continue
19218			}
19219			x := v_1
19220			v.reset(OpNot)
19221			v.AddArg(x)
19222			return true
19223		}
19224		break
19225	}
19226	// match: (NeqB (Not x) (Not y))
19227	// result: (NeqB x y)
19228	for {
19229		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19230			if v_0.Op != OpNot {
19231				continue
19232			}
19233			x := v_0.Args[0]
19234			if v_1.Op != OpNot {
19235				continue
19236			}
19237			y := v_1.Args[0]
19238			v.reset(OpNeqB)
19239			v.AddArg2(x, y)
19240			return true
19241		}
19242		break
19243	}
19244	return false
19245}
19246func rewriteValuegeneric_OpNeqInter(v *Value) bool {
19247	v_1 := v.Args[1]
19248	v_0 := v.Args[0]
19249	b := v.Block
19250	typ := &b.Func.Config.Types
19251	// match: (NeqInter x y)
19252	// result: (NeqPtr (ITab x) (ITab y))
19253	for {
19254		x := v_0
19255		y := v_1
19256		v.reset(OpNeqPtr)
19257		v0 := b.NewValue0(v.Pos, OpITab, typ.Uintptr)
19258		v0.AddArg(x)
19259		v1 := b.NewValue0(v.Pos, OpITab, typ.Uintptr)
19260		v1.AddArg(y)
19261		v.AddArg2(v0, v1)
19262		return true
19263	}
19264}
19265func rewriteValuegeneric_OpNeqPtr(v *Value) bool {
19266	v_1 := v.Args[1]
19267	v_0 := v.Args[0]
19268	// match: (NeqPtr x x)
19269	// result: (ConstBool [false])
19270	for {
19271		x := v_0
19272		if x != v_1 {
19273			break
19274		}
19275		v.reset(OpConstBool)
19276		v.AuxInt = boolToAuxInt(false)
19277		return true
19278	}
19279	// match: (NeqPtr (Addr {x} _) (Addr {y} _))
19280	// result: (ConstBool [x != y])
19281	for {
19282		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19283			if v_0.Op != OpAddr {
19284				continue
19285			}
19286			x := auxToSym(v_0.Aux)
19287			if v_1.Op != OpAddr {
19288				continue
19289			}
19290			y := auxToSym(v_1.Aux)
19291			v.reset(OpConstBool)
19292			v.AuxInt = boolToAuxInt(x != y)
19293			return true
19294		}
19295		break
19296	}
19297	// match: (NeqPtr (Addr {x} _) (OffPtr [o] (Addr {y} _)))
19298	// result: (ConstBool [x != y || o != 0])
19299	for {
19300		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19301			if v_0.Op != OpAddr {
19302				continue
19303			}
19304			x := auxToSym(v_0.Aux)
19305			if v_1.Op != OpOffPtr {
19306				continue
19307			}
19308			o := auxIntToInt64(v_1.AuxInt)
19309			v_1_0 := v_1.Args[0]
19310			if v_1_0.Op != OpAddr {
19311				continue
19312			}
19313			y := auxToSym(v_1_0.Aux)
19314			v.reset(OpConstBool)
19315			v.AuxInt = boolToAuxInt(x != y || o != 0)
19316			return true
19317		}
19318		break
19319	}
19320	// match: (NeqPtr (OffPtr [o1] (Addr {x} _)) (OffPtr [o2] (Addr {y} _)))
19321	// result: (ConstBool [x != y || o1 != o2])
19322	for {
19323		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19324			if v_0.Op != OpOffPtr {
19325				continue
19326			}
19327			o1 := auxIntToInt64(v_0.AuxInt)
19328			v_0_0 := v_0.Args[0]
19329			if v_0_0.Op != OpAddr {
19330				continue
19331			}
19332			x := auxToSym(v_0_0.Aux)
19333			if v_1.Op != OpOffPtr {
19334				continue
19335			}
19336			o2 := auxIntToInt64(v_1.AuxInt)
19337			v_1_0 := v_1.Args[0]
19338			if v_1_0.Op != OpAddr {
19339				continue
19340			}
19341			y := auxToSym(v_1_0.Aux)
19342			v.reset(OpConstBool)
19343			v.AuxInt = boolToAuxInt(x != y || o1 != o2)
19344			return true
19345		}
19346		break
19347	}
19348	// match: (NeqPtr (LocalAddr {x} _ _) (LocalAddr {y} _ _))
19349	// result: (ConstBool [x != y])
19350	for {
19351		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19352			if v_0.Op != OpLocalAddr {
19353				continue
19354			}
19355			x := auxToSym(v_0.Aux)
19356			if v_1.Op != OpLocalAddr {
19357				continue
19358			}
19359			y := auxToSym(v_1.Aux)
19360			v.reset(OpConstBool)
19361			v.AuxInt = boolToAuxInt(x != y)
19362			return true
19363		}
19364		break
19365	}
19366	// match: (NeqPtr (LocalAddr {x} _ _) (OffPtr [o] (LocalAddr {y} _ _)))
19367	// result: (ConstBool [x != y || o != 0])
19368	for {
19369		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19370			if v_0.Op != OpLocalAddr {
19371				continue
19372			}
19373			x := auxToSym(v_0.Aux)
19374			if v_1.Op != OpOffPtr {
19375				continue
19376			}
19377			o := auxIntToInt64(v_1.AuxInt)
19378			v_1_0 := v_1.Args[0]
19379			if v_1_0.Op != OpLocalAddr {
19380				continue
19381			}
19382			y := auxToSym(v_1_0.Aux)
19383			v.reset(OpConstBool)
19384			v.AuxInt = boolToAuxInt(x != y || o != 0)
19385			return true
19386		}
19387		break
19388	}
19389	// match: (NeqPtr (OffPtr [o1] (LocalAddr {x} _ _)) (OffPtr [o2] (LocalAddr {y} _ _)))
19390	// result: (ConstBool [x != y || o1 != o2])
19391	for {
19392		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19393			if v_0.Op != OpOffPtr {
19394				continue
19395			}
19396			o1 := auxIntToInt64(v_0.AuxInt)
19397			v_0_0 := v_0.Args[0]
19398			if v_0_0.Op != OpLocalAddr {
19399				continue
19400			}
19401			x := auxToSym(v_0_0.Aux)
19402			if v_1.Op != OpOffPtr {
19403				continue
19404			}
19405			o2 := auxIntToInt64(v_1.AuxInt)
19406			v_1_0 := v_1.Args[0]
19407			if v_1_0.Op != OpLocalAddr {
19408				continue
19409			}
19410			y := auxToSym(v_1_0.Aux)
19411			v.reset(OpConstBool)
19412			v.AuxInt = boolToAuxInt(x != y || o1 != o2)
19413			return true
19414		}
19415		break
19416	}
19417	// match: (NeqPtr (OffPtr [o1] p1) p2)
19418	// cond: isSamePtr(p1, p2)
19419	// result: (ConstBool [o1 != 0])
19420	for {
19421		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19422			if v_0.Op != OpOffPtr {
19423				continue
19424			}
19425			o1 := auxIntToInt64(v_0.AuxInt)
19426			p1 := v_0.Args[0]
19427			p2 := v_1
19428			if !(isSamePtr(p1, p2)) {
19429				continue
19430			}
19431			v.reset(OpConstBool)
19432			v.AuxInt = boolToAuxInt(o1 != 0)
19433			return true
19434		}
19435		break
19436	}
19437	// match: (NeqPtr (OffPtr [o1] p1) (OffPtr [o2] p2))
19438	// cond: isSamePtr(p1, p2)
19439	// result: (ConstBool [o1 != o2])
19440	for {
19441		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19442			if v_0.Op != OpOffPtr {
19443				continue
19444			}
19445			o1 := auxIntToInt64(v_0.AuxInt)
19446			p1 := v_0.Args[0]
19447			if v_1.Op != OpOffPtr {
19448				continue
19449			}
19450			o2 := auxIntToInt64(v_1.AuxInt)
19451			p2 := v_1.Args[0]
19452			if !(isSamePtr(p1, p2)) {
19453				continue
19454			}
19455			v.reset(OpConstBool)
19456			v.AuxInt = boolToAuxInt(o1 != o2)
19457			return true
19458		}
19459		break
19460	}
19461	// match: (NeqPtr (Const32 [c]) (Const32 [d]))
19462	// result: (ConstBool [c != d])
19463	for {
19464		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19465			if v_0.Op != OpConst32 {
19466				continue
19467			}
19468			c := auxIntToInt32(v_0.AuxInt)
19469			if v_1.Op != OpConst32 {
19470				continue
19471			}
19472			d := auxIntToInt32(v_1.AuxInt)
19473			v.reset(OpConstBool)
19474			v.AuxInt = boolToAuxInt(c != d)
19475			return true
19476		}
19477		break
19478	}
19479	// match: (NeqPtr (Const64 [c]) (Const64 [d]))
19480	// result: (ConstBool [c != d])
19481	for {
19482		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19483			if v_0.Op != OpConst64 {
19484				continue
19485			}
19486			c := auxIntToInt64(v_0.AuxInt)
19487			if v_1.Op != OpConst64 {
19488				continue
19489			}
19490			d := auxIntToInt64(v_1.AuxInt)
19491			v.reset(OpConstBool)
19492			v.AuxInt = boolToAuxInt(c != d)
19493			return true
19494		}
19495		break
19496	}
19497	// match: (NeqPtr (Convert (Addr {x} _) _) (Addr {y} _))
19498	// result: (ConstBool [x!=y])
19499	for {
19500		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19501			if v_0.Op != OpConvert {
19502				continue
19503			}
19504			v_0_0 := v_0.Args[0]
19505			if v_0_0.Op != OpAddr {
19506				continue
19507			}
19508			x := auxToSym(v_0_0.Aux)
19509			if v_1.Op != OpAddr {
19510				continue
19511			}
19512			y := auxToSym(v_1.Aux)
19513			v.reset(OpConstBool)
19514			v.AuxInt = boolToAuxInt(x != y)
19515			return true
19516		}
19517		break
19518	}
19519	// match: (NeqPtr (LocalAddr _ _) (Addr _))
19520	// result: (ConstBool [true])
19521	for {
19522		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19523			if v_0.Op != OpLocalAddr || v_1.Op != OpAddr {
19524				continue
19525			}
19526			v.reset(OpConstBool)
19527			v.AuxInt = boolToAuxInt(true)
19528			return true
19529		}
19530		break
19531	}
19532	// match: (NeqPtr (OffPtr (LocalAddr _ _)) (Addr _))
19533	// result: (ConstBool [true])
19534	for {
19535		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19536			if v_0.Op != OpOffPtr {
19537				continue
19538			}
19539			v_0_0 := v_0.Args[0]
19540			if v_0_0.Op != OpLocalAddr || v_1.Op != OpAddr {
19541				continue
19542			}
19543			v.reset(OpConstBool)
19544			v.AuxInt = boolToAuxInt(true)
19545			return true
19546		}
19547		break
19548	}
19549	// match: (NeqPtr (LocalAddr _ _) (OffPtr (Addr _)))
19550	// result: (ConstBool [true])
19551	for {
19552		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19553			if v_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
19554				continue
19555			}
19556			v_1_0 := v_1.Args[0]
19557			if v_1_0.Op != OpAddr {
19558				continue
19559			}
19560			v.reset(OpConstBool)
19561			v.AuxInt = boolToAuxInt(true)
19562			return true
19563		}
19564		break
19565	}
19566	// match: (NeqPtr (OffPtr (LocalAddr _ _)) (OffPtr (Addr _)))
19567	// result: (ConstBool [true])
19568	for {
19569		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19570			if v_0.Op != OpOffPtr {
19571				continue
19572			}
19573			v_0_0 := v_0.Args[0]
19574			if v_0_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
19575				continue
19576			}
19577			v_1_0 := v_1.Args[0]
19578			if v_1_0.Op != OpAddr {
19579				continue
19580			}
19581			v.reset(OpConstBool)
19582			v.AuxInt = boolToAuxInt(true)
19583			return true
19584		}
19585		break
19586	}
19587	// match: (NeqPtr (AddPtr p1 o1) p2)
19588	// cond: isSamePtr(p1, p2)
19589	// result: (IsNonNil o1)
19590	for {
19591		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19592			if v_0.Op != OpAddPtr {
19593				continue
19594			}
19595			o1 := v_0.Args[1]
19596			p1 := v_0.Args[0]
19597			p2 := v_1
19598			if !(isSamePtr(p1, p2)) {
19599				continue
19600			}
19601			v.reset(OpIsNonNil)
19602			v.AddArg(o1)
19603			return true
19604		}
19605		break
19606	}
19607	// match: (NeqPtr (Const32 [0]) p)
19608	// result: (IsNonNil p)
19609	for {
19610		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19611			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
19612				continue
19613			}
19614			p := v_1
19615			v.reset(OpIsNonNil)
19616			v.AddArg(p)
19617			return true
19618		}
19619		break
19620	}
19621	// match: (NeqPtr (Const64 [0]) p)
19622	// result: (IsNonNil p)
19623	for {
19624		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19625			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
19626				continue
19627			}
19628			p := v_1
19629			v.reset(OpIsNonNil)
19630			v.AddArg(p)
19631			return true
19632		}
19633		break
19634	}
19635	// match: (NeqPtr (ConstNil) p)
19636	// result: (IsNonNil p)
19637	for {
19638		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19639			if v_0.Op != OpConstNil {
19640				continue
19641			}
19642			p := v_1
19643			v.reset(OpIsNonNil)
19644			v.AddArg(p)
19645			return true
19646		}
19647		break
19648	}
19649	return false
19650}
19651func rewriteValuegeneric_OpNeqSlice(v *Value) bool {
19652	v_1 := v.Args[1]
19653	v_0 := v.Args[0]
19654	b := v.Block
19655	typ := &b.Func.Config.Types
19656	// match: (NeqSlice x y)
19657	// result: (NeqPtr (SlicePtr x) (SlicePtr y))
19658	for {
19659		x := v_0
19660		y := v_1
19661		v.reset(OpNeqPtr)
19662		v0 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
19663		v0.AddArg(x)
19664		v1 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
19665		v1.AddArg(y)
19666		v.AddArg2(v0, v1)
19667		return true
19668	}
19669}
// rewriteValuegeneric_OpNilCheck removes nil checks on pointers the rules can
// prove are never nil: the g register (GetG), the result of a
// runtime.newobject call (possibly behind an OffPtr), and addresses of
// globals (Addr off SB, possibly behind a Convert). On a match the NilCheck
// is replaced by the checked pointer itself via v.copyOf(ptr).
// It reports whether v was rewritten.
func rewriteValuegeneric_OpNilCheck(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	fe := b.Func.fe
	// match: (NilCheck ptr:(GetG mem) mem)
	// result: ptr
	// The g pointer is never nil; require the same memory state so the
	// GetG observed the memory the check runs in.
	for {
		ptr := v_0
		if ptr.Op != OpGetG {
			break
		}
		mem := ptr.Args[0]
		if mem != v_1 {
			break
		}
		v.copyOf(ptr)
		return true
	}
	// match: (NilCheck ptr:(SelectN [0] call:(StaticLECall _ _)) _)
	// cond: isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")
	// result: ptr
	// runtime.newobject never returns nil. Note warnRule is evaluated as
	// part of the condition for its side effect (the -d=checknil warning);
	// it always returns true, so it does not affect matching.
	for {
		ptr := v_0
		if ptr.Op != OpSelectN || auxIntToInt64(ptr.AuxInt) != 0 {
			break
		}
		call := ptr.Args[0]
		if call.Op != OpStaticLECall || len(call.Args) != 2 || !(isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
			break
		}
		v.copyOf(ptr)
		return true
	}
	// match: (NilCheck ptr:(OffPtr (SelectN [0] call:(StaticLECall _ _))) _)
	// cond: isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")
	// result: ptr
	// Same as above, but the newobject result is offset by an OffPtr.
	for {
		ptr := v_0
		if ptr.Op != OpOffPtr {
			break
		}
		ptr_0 := ptr.Args[0]
		if ptr_0.Op != OpSelectN || auxIntToInt64(ptr_0.AuxInt) != 0 {
			break
		}
		call := ptr_0.Args[0]
		if call.Op != OpStaticLECall || len(call.Args) != 2 || !(isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
			break
		}
		v.copyOf(ptr)
		return true
	}
	// match: (NilCheck ptr:(Addr {_} (SB)) _)
	// result: ptr
	// The address of a global (relative to SB) is never nil.
	for {
		ptr := v_0
		if ptr.Op != OpAddr {
			break
		}
		ptr_0 := ptr.Args[0]
		if ptr_0.Op != OpSB {
			break
		}
		v.copyOf(ptr)
		return true
	}
	// match: (NilCheck ptr:(Convert (Addr {_} (SB)) _) _)
	// result: ptr
	// Same as above through a Convert (unsafe.Pointer round-trip).
	for {
		ptr := v_0
		if ptr.Op != OpConvert {
			break
		}
		ptr_0 := ptr.Args[0]
		if ptr_0.Op != OpAddr {
			break
		}
		ptr_0_0 := ptr_0.Args[0]
		if ptr_0_0.Op != OpSB {
			break
		}
		v.copyOf(ptr)
		return true
	}
	return false
}
19757func rewriteValuegeneric_OpNot(v *Value) bool {
19758	v_0 := v.Args[0]
19759	// match: (Not (ConstBool [c]))
19760	// result: (ConstBool [!c])
19761	for {
19762		if v_0.Op != OpConstBool {
19763			break
19764		}
19765		c := auxIntToBool(v_0.AuxInt)
19766		v.reset(OpConstBool)
19767		v.AuxInt = boolToAuxInt(!c)
19768		return true
19769	}
19770	// match: (Not (Eq64 x y))
19771	// result: (Neq64 x y)
19772	for {
19773		if v_0.Op != OpEq64 {
19774			break
19775		}
19776		y := v_0.Args[1]
19777		x := v_0.Args[0]
19778		v.reset(OpNeq64)
19779		v.AddArg2(x, y)
19780		return true
19781	}
19782	// match: (Not (Eq32 x y))
19783	// result: (Neq32 x y)
19784	for {
19785		if v_0.Op != OpEq32 {
19786			break
19787		}
19788		y := v_0.Args[1]
19789		x := v_0.Args[0]
19790		v.reset(OpNeq32)
19791		v.AddArg2(x, y)
19792		return true
19793	}
19794	// match: (Not (Eq16 x y))
19795	// result: (Neq16 x y)
19796	for {
19797		if v_0.Op != OpEq16 {
19798			break
19799		}
19800		y := v_0.Args[1]
19801		x := v_0.Args[0]
19802		v.reset(OpNeq16)
19803		v.AddArg2(x, y)
19804		return true
19805	}
19806	// match: (Not (Eq8 x y))
19807	// result: (Neq8 x y)
19808	for {
19809		if v_0.Op != OpEq8 {
19810			break
19811		}
19812		y := v_0.Args[1]
19813		x := v_0.Args[0]
19814		v.reset(OpNeq8)
19815		v.AddArg2(x, y)
19816		return true
19817	}
19818	// match: (Not (EqB x y))
19819	// result: (NeqB x y)
19820	for {
19821		if v_0.Op != OpEqB {
19822			break
19823		}
19824		y := v_0.Args[1]
19825		x := v_0.Args[0]
19826		v.reset(OpNeqB)
19827		v.AddArg2(x, y)
19828		return true
19829	}
19830	// match: (Not (EqPtr x y))
19831	// result: (NeqPtr x y)
19832	for {
19833		if v_0.Op != OpEqPtr {
19834			break
19835		}
19836		y := v_0.Args[1]
19837		x := v_0.Args[0]
19838		v.reset(OpNeqPtr)
19839		v.AddArg2(x, y)
19840		return true
19841	}
19842	// match: (Not (Eq64F x y))
19843	// result: (Neq64F x y)
19844	for {
19845		if v_0.Op != OpEq64F {
19846			break
19847		}
19848		y := v_0.Args[1]
19849		x := v_0.Args[0]
19850		v.reset(OpNeq64F)
19851		v.AddArg2(x, y)
19852		return true
19853	}
19854	// match: (Not (Eq32F x y))
19855	// result: (Neq32F x y)
19856	for {
19857		if v_0.Op != OpEq32F {
19858			break
19859		}
19860		y := v_0.Args[1]
19861		x := v_0.Args[0]
19862		v.reset(OpNeq32F)
19863		v.AddArg2(x, y)
19864		return true
19865	}
19866	// match: (Not (Neq64 x y))
19867	// result: (Eq64 x y)
19868	for {
19869		if v_0.Op != OpNeq64 {
19870			break
19871		}
19872		y := v_0.Args[1]
19873		x := v_0.Args[0]
19874		v.reset(OpEq64)
19875		v.AddArg2(x, y)
19876		return true
19877	}
19878	// match: (Not (Neq32 x y))
19879	// result: (Eq32 x y)
19880	for {
19881		if v_0.Op != OpNeq32 {
19882			break
19883		}
19884		y := v_0.Args[1]
19885		x := v_0.Args[0]
19886		v.reset(OpEq32)
19887		v.AddArg2(x, y)
19888		return true
19889	}
19890	// match: (Not (Neq16 x y))
19891	// result: (Eq16 x y)
19892	for {
19893		if v_0.Op != OpNeq16 {
19894			break
19895		}
19896		y := v_0.Args[1]
19897		x := v_0.Args[0]
19898		v.reset(OpEq16)
19899		v.AddArg2(x, y)
19900		return true
19901	}
19902	// match: (Not (Neq8 x y))
19903	// result: (Eq8 x y)
19904	for {
19905		if v_0.Op != OpNeq8 {
19906			break
19907		}
19908		y := v_0.Args[1]
19909		x := v_0.Args[0]
19910		v.reset(OpEq8)
19911		v.AddArg2(x, y)
19912		return true
19913	}
19914	// match: (Not (NeqB x y))
19915	// result: (EqB x y)
19916	for {
19917		if v_0.Op != OpNeqB {
19918			break
19919		}
19920		y := v_0.Args[1]
19921		x := v_0.Args[0]
19922		v.reset(OpEqB)
19923		v.AddArg2(x, y)
19924		return true
19925	}
19926	// match: (Not (NeqPtr x y))
19927	// result: (EqPtr x y)
19928	for {
19929		if v_0.Op != OpNeqPtr {
19930			break
19931		}
19932		y := v_0.Args[1]
19933		x := v_0.Args[0]
19934		v.reset(OpEqPtr)
19935		v.AddArg2(x, y)
19936		return true
19937	}
19938	// match: (Not (Neq64F x y))
19939	// result: (Eq64F x y)
19940	for {
19941		if v_0.Op != OpNeq64F {
19942			break
19943		}
19944		y := v_0.Args[1]
19945		x := v_0.Args[0]
19946		v.reset(OpEq64F)
19947		v.AddArg2(x, y)
19948		return true
19949	}
19950	// match: (Not (Neq32F x y))
19951	// result: (Eq32F x y)
19952	for {
19953		if v_0.Op != OpNeq32F {
19954			break
19955		}
19956		y := v_0.Args[1]
19957		x := v_0.Args[0]
19958		v.reset(OpEq32F)
19959		v.AddArg2(x, y)
19960		return true
19961	}
19962	// match: (Not (Less64 x y))
19963	// result: (Leq64 y x)
19964	for {
19965		if v_0.Op != OpLess64 {
19966			break
19967		}
19968		y := v_0.Args[1]
19969		x := v_0.Args[0]
19970		v.reset(OpLeq64)
19971		v.AddArg2(y, x)
19972		return true
19973	}
19974	// match: (Not (Less32 x y))
19975	// result: (Leq32 y x)
19976	for {
19977		if v_0.Op != OpLess32 {
19978			break
19979		}
19980		y := v_0.Args[1]
19981		x := v_0.Args[0]
19982		v.reset(OpLeq32)
19983		v.AddArg2(y, x)
19984		return true
19985	}
19986	// match: (Not (Less16 x y))
19987	// result: (Leq16 y x)
19988	for {
19989		if v_0.Op != OpLess16 {
19990			break
19991		}
19992		y := v_0.Args[1]
19993		x := v_0.Args[0]
19994		v.reset(OpLeq16)
19995		v.AddArg2(y, x)
19996		return true
19997	}
19998	// match: (Not (Less8 x y))
19999	// result: (Leq8 y x)
20000	for {
20001		if v_0.Op != OpLess8 {
20002			break
20003		}
20004		y := v_0.Args[1]
20005		x := v_0.Args[0]
20006		v.reset(OpLeq8)
20007		v.AddArg2(y, x)
20008		return true
20009	}
20010	// match: (Not (Less64U x y))
20011	// result: (Leq64U y x)
20012	for {
20013		if v_0.Op != OpLess64U {
20014			break
20015		}
20016		y := v_0.Args[1]
20017		x := v_0.Args[0]
20018		v.reset(OpLeq64U)
20019		v.AddArg2(y, x)
20020		return true
20021	}
20022	// match: (Not (Less32U x y))
20023	// result: (Leq32U y x)
20024	for {
20025		if v_0.Op != OpLess32U {
20026			break
20027		}
20028		y := v_0.Args[1]
20029		x := v_0.Args[0]
20030		v.reset(OpLeq32U)
20031		v.AddArg2(y, x)
20032		return true
20033	}
20034	// match: (Not (Less16U x y))
20035	// result: (Leq16U y x)
20036	for {
20037		if v_0.Op != OpLess16U {
20038			break
20039		}
20040		y := v_0.Args[1]
20041		x := v_0.Args[0]
20042		v.reset(OpLeq16U)
20043		v.AddArg2(y, x)
20044		return true
20045	}
20046	// match: (Not (Less8U x y))
20047	// result: (Leq8U y x)
20048	for {
20049		if v_0.Op != OpLess8U {
20050			break
20051		}
20052		y := v_0.Args[1]
20053		x := v_0.Args[0]
20054		v.reset(OpLeq8U)
20055		v.AddArg2(y, x)
20056		return true
20057	}
20058	// match: (Not (Leq64 x y))
20059	// result: (Less64 y x)
20060	for {
20061		if v_0.Op != OpLeq64 {
20062			break
20063		}
20064		y := v_0.Args[1]
20065		x := v_0.Args[0]
20066		v.reset(OpLess64)
20067		v.AddArg2(y, x)
20068		return true
20069	}
20070	// match: (Not (Leq32 x y))
20071	// result: (Less32 y x)
20072	for {
20073		if v_0.Op != OpLeq32 {
20074			break
20075		}
20076		y := v_0.Args[1]
20077		x := v_0.Args[0]
20078		v.reset(OpLess32)
20079		v.AddArg2(y, x)
20080		return true
20081	}
20082	// match: (Not (Leq16 x y))
20083	// result: (Less16 y x)
20084	for {
20085		if v_0.Op != OpLeq16 {
20086			break
20087		}
20088		y := v_0.Args[1]
20089		x := v_0.Args[0]
20090		v.reset(OpLess16)
20091		v.AddArg2(y, x)
20092		return true
20093	}
20094	// match: (Not (Leq8 x y))
20095	// result: (Less8 y x)
20096	for {
20097		if v_0.Op != OpLeq8 {
20098			break
20099		}
20100		y := v_0.Args[1]
20101		x := v_0.Args[0]
20102		v.reset(OpLess8)
20103		v.AddArg2(y, x)
20104		return true
20105	}
20106	// match: (Not (Leq64U x y))
20107	// result: (Less64U y x)
20108	for {
20109		if v_0.Op != OpLeq64U {
20110			break
20111		}
20112		y := v_0.Args[1]
20113		x := v_0.Args[0]
20114		v.reset(OpLess64U)
20115		v.AddArg2(y, x)
20116		return true
20117	}
20118	// match: (Not (Leq32U x y))
20119	// result: (Less32U y x)
20120	for {
20121		if v_0.Op != OpLeq32U {
20122			break
20123		}
20124		y := v_0.Args[1]
20125		x := v_0.Args[0]
20126		v.reset(OpLess32U)
20127		v.AddArg2(y, x)
20128		return true
20129	}
20130	// match: (Not (Leq16U x y))
20131	// result: (Less16U y x)
20132	for {
20133		if v_0.Op != OpLeq16U {
20134			break
20135		}
20136		y := v_0.Args[1]
20137		x := v_0.Args[0]
20138		v.reset(OpLess16U)
20139		v.AddArg2(y, x)
20140		return true
20141	}
20142	// match: (Not (Leq8U x y))
20143	// result: (Less8U y x)
20144	for {
20145		if v_0.Op != OpLeq8U {
20146			break
20147		}
20148		y := v_0.Args[1]
20149		x := v_0.Args[0]
20150		v.reset(OpLess8U)
20151		v.AddArg2(y, x)
20152		return true
20153	}
20154	return false
20155}
20156func rewriteValuegeneric_OpOffPtr(v *Value) bool {
20157	v_0 := v.Args[0]
20158	// match: (OffPtr (OffPtr p [y]) [x])
20159	// result: (OffPtr p [x+y])
20160	for {
20161		x := auxIntToInt64(v.AuxInt)
20162		if v_0.Op != OpOffPtr {
20163			break
20164		}
20165		y := auxIntToInt64(v_0.AuxInt)
20166		p := v_0.Args[0]
20167		v.reset(OpOffPtr)
20168		v.AuxInt = int64ToAuxInt(x + y)
20169		v.AddArg(p)
20170		return true
20171	}
20172	// match: (OffPtr p [0])
20173	// cond: v.Type.Compare(p.Type) == types.CMPeq
20174	// result: p
20175	for {
20176		if auxIntToInt64(v.AuxInt) != 0 {
20177			break
20178		}
20179		p := v_0
20180		if !(v.Type.Compare(p.Type) == types.CMPeq) {
20181			break
20182		}
20183		v.copyOf(p)
20184		return true
20185	}
20186	return false
20187}
// rewriteValuegeneric_OpOr16 applies the generic rewrite rules for Or16:
// constant folding, identity/absorption (x|x, x|0, x|-1, x|^x), De Morgan
// (Com|Com => Com(And)), constant re-association, and recognition of
// shift-pair idioms as RotateLeft16. It reports whether v was rewritten.
//
// In each rule, the loop
//
//	for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { ... }
//
// tries the pattern with the two (commutative) arguments in both orders;
// "continue" moves to the swapped order, and the enclosing "for { ... break }"
// falls through to the next rule when neither order matches.
func rewriteValuegeneric_OpOr16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Or16 (Const16 [c]) (Const16 [d]))
	// result: (Const16 [c|d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpConst16 {
				continue
			}
			d := auxIntToInt16(v_1.AuxInt)
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(c | d)
			return true
		}
		break
	}
	// match: (Or16 <t> (Com16 x) (Com16 y))
	// result: (Com16 (And16 <t> x y))
	// De Morgan: ^x | ^y == ^(x & y).
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom16 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpCom16 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpCom16)
			v0 := b.NewValue0(v.Pos, OpAnd16, t)
			v0.AddArg2(x, y)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (Or16 x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (Or16 (Const16 [0]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Or16 (Const16 [-1]) _)
	// result: (Const16 [-1])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
				continue
			}
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(-1)
			return true
		}
		break
	}
	// match: (Or16 (Com16 x) x)
	// result: (Const16 [-1])
	// x | ^x has every bit set.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom16 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(-1)
			return true
		}
		break
	}
	// match: (Or16 x (Or16 x y))
	// result: (Or16 x y)
	// Or is idempotent, so the duplicated x can be dropped.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpOr16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpOr16)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (Or16 (And16 x (Const16 [c2])) (Const16 <t> [c1]))
	// cond: ^(c1 | c2) == 0
	// result: (Or16 (Const16 <t> [c1]) x)
	// If c1|c2 covers all bits, the And mask is redundant under the Or.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd16 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				if v_0_1.Op != OpConst16 {
					continue
				}
				c2 := auxIntToInt16(v_0_1.AuxInt)
				if v_1.Op != OpConst16 {
					continue
				}
				t := v_1.Type
				c1 := auxIntToInt16(v_1.AuxInt)
				if !(^(c1 | c2) == 0) {
					continue
				}
				v.reset(OpOr16)
				v0 := b.NewValue0(v.Pos, OpConst16, t)
				v0.AuxInt = int16ToAuxInt(c1)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Or16 (Or16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Or16 i (Or16 <t> z x))
	// Re-associate to float the constant outward so it can fold later.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpOr16 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst16 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst16 && x.Op != OpConst16) {
					continue
				}
				v.reset(OpOr16)
				v0 := b.NewValue0(v.Pos, OpOr16, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Or16 (Const16 <t> [c]) (Or16 (Const16 <t> [d]) x))
	// result: (Or16 (Const16 <t> [c|d]) x)
	// Fold two constants that the previous rule brought together.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpOr16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt16(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpOr16)
				v0 := b.NewValue0(v.Pos, OpConst16, t)
				v0.AuxInt = int16ToAuxInt(c | d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// The remaining rules recognize shift-pair rotate idioms
	// (x<<c | x>>(16-c), in various index widths and operand orders)
	// and replace them with RotateLeft16.
	// match: (Or16 (Lsh16x64 x z:(Const64 <t> [c])) (Rsh16Ux64 x (Const64 [d])))
	// cond: c < 16 && d == 16-c && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLsh16x64 {
				continue
			}
			_ = v_0.Args[1]
			x := v_0.Args[0]
			z := v_0.Args[1]
			if z.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(z.AuxInt)
			if v_1.Op != OpRsh16Ux64 {
				continue
			}
			_ = v_1.Args[1]
			if x != v_1.Args[0] {
				continue
			}
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1_1.AuxInt)
			if !(c < 16 && d == 16-c && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Or16 left:(Lsh16x64 x y) right:(Rsh16Ux64 x (Sub64 (Const64 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh16x64 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh16Ux64 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub64 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Or16 left:(Lsh16x32 x y) right:(Rsh16Ux32 x (Sub32 (Const32 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh16x32 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh16Ux32 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub32 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Or16 left:(Lsh16x16 x y) right:(Rsh16Ux16 x (Sub16 (Const16 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh16x16 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh16Ux16 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub16 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Or16 left:(Lsh16x8 x y) right:(Rsh16Ux8 x (Sub8 (Const8 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh16x8 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh16Ux8 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub8 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Or16 right:(Rsh16Ux64 x y) left:(Lsh16x64 x z:(Sub64 (Const64 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh16Ux64 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh16x64 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub64 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Or16 right:(Rsh16Ux32 x y) left:(Lsh16x32 x z:(Sub32 (Const32 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh16Ux32 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh16x32 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub32 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Or16 right:(Rsh16Ux16 x y) left:(Lsh16x16 x z:(Sub16 (Const16 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh16Ux16 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh16x16 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub16 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Or16 right:(Rsh16Ux8 x y) left:(Lsh16x8 x z:(Sub8 (Const8 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh16Ux8 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh16x8 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub8 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpOr32 applies the generic rewrite rules for Or32
// values: constant folding, algebraic identities (x|x, x|0, x|-1,
// x|^x), absorption and constant reassociation, and recognition of
// matching shift pairs as RotateLeft32. It rewrites v in place and
// reports whether any rule fired.
//
// NOTE(review): this function is generated from _gen/generic.rules
// ("DO NOT EDIT" header) — rule changes belong in the rules source,
// not here. Rule order is significant: each rule is tried in sequence
// and the first match wins.
func rewriteValuegeneric_OpOr32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Or32 (Const32 [c]) (Const32 [d]))
	// result: (Const32 [c|d])
	for {
		// Or32 is commutative: the _i0 loop retries the match with the
		// two arguments swapped (same idiom throughout this function).
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpConst32 {
				continue
			}
			d := auxIntToInt32(v_1.AuxInt)
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(c | d)
			return true
		}
		break
	}
	// match: (Or32 <t> (Com32 x) (Com32 y))
	// result: (Com32 (And32 <t> x y))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom32 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpCom32 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpCom32)
			v0 := b.NewValue0(v.Pos, OpAnd32, t)
			v0.AddArg2(x, y)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (Or32 x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (Or32 (Const32 [0]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Or32 (Const32 [-1]) _)
	// result: (Const32 [-1])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(-1)
			return true
		}
		break
	}
	// match: (Or32 (Com32 x) x)
	// result: (Const32 [-1])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom32 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(-1)
			return true
		}
		break
	}
	// match: (Or32 x (Or32 x y))
	// result: (Or32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpOr32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpOr32)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (Or32 (And32 x (Const32 [c2])) (Const32 <t> [c1]))
	// cond: ^(c1 | c2) == 0
	// result: (Or32 (Const32 <t> [c1]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd32 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				if v_0_1.Op != OpConst32 {
					continue
				}
				c2 := auxIntToInt32(v_0_1.AuxInt)
				if v_1.Op != OpConst32 {
					continue
				}
				t := v_1.Type
				c1 := auxIntToInt32(v_1.AuxInt)
				if !(^(c1 | c2) == 0) {
					continue
				}
				v.reset(OpOr32)
				v0 := b.NewValue0(v.Pos, OpConst32, t)
				v0.AuxInt = int32ToAuxInt(c1)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Or32 (Or32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Or32 i (Or32 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpOr32 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst32 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst32 && x.Op != OpConst32) {
					continue
				}
				v.reset(OpOr32)
				v0 := b.NewValue0(v.Pos, OpOr32, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Or32 (Const32 <t> [c]) (Or32 (Const32 <t> [d]) x))
	// result: (Or32 (Const32 <t> [c|d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpOr32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt32(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpOr32)
				v0 := b.NewValue0(v.Pos, OpConst32, t)
				v0.AuxInt = int32ToAuxInt(c | d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Or32 (Lsh32x64 x z:(Const64 <t> [c])) (Rsh32Ux64 x (Const64 [d])))
	// cond: c < 32 && d == 32-c && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLsh32x64 {
				continue
			}
			_ = v_0.Args[1]
			x := v_0.Args[0]
			z := v_0.Args[1]
			if z.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(z.AuxInt)
			if v_1.Op != OpRsh32Ux64 {
				continue
			}
			_ = v_1.Args[1]
			if x != v_1.Args[0] {
				continue
			}
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1_1.AuxInt)
			if !(c < 32 && d == 32-c && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Or32 left:(Lsh32x64 x y) right:(Rsh32Ux64 x (Sub64 (Const64 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh32x64 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh32Ux64 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub64 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Or32 left:(Lsh32x32 x y) right:(Rsh32Ux32 x (Sub32 (Const32 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh32x32 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh32Ux32 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub32 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Or32 left:(Lsh32x16 x y) right:(Rsh32Ux16 x (Sub16 (Const16 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh32x16 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh32Ux16 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub16 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Or32 left:(Lsh32x8 x y) right:(Rsh32Ux8 x (Sub8 (Const8 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh32x8 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh32Ux8 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub8 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Or32 right:(Rsh32Ux64 x y) left:(Lsh32x64 x z:(Sub64 (Const64 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh32Ux64 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh32x64 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub64 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Or32 right:(Rsh32Ux32 x y) left:(Lsh32x32 x z:(Sub32 (Const32 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh32Ux32 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh32x32 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub32 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Or32 right:(Rsh32Ux16 x y) left:(Lsh32x16 x z:(Sub16 (Const16 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh32Ux16 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh32x16 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub16 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Or32 right:(Rsh32Ux8 x y) left:(Lsh32x8 x z:(Sub8 (Const8 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh32Ux8 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh32x8 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub8 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// No rule matched; v is left unchanged.
	return false
}
// rewriteValuegeneric_OpOr64 applies the generic rewrite rules for Or64
// values: constant folding, algebraic identities (x|x, x|0, x|-1,
// x|^x), absorption and constant reassociation, and recognition of
// matching shift pairs as RotateLeft64. It rewrites v in place and
// reports whether any rule fired.
//
// NOTE(review): this function is generated from _gen/generic.rules
// ("DO NOT EDIT" header) — rule changes belong in the rules source,
// not here. Rule order is significant: each rule is tried in sequence
// and the first match wins.
func rewriteValuegeneric_OpOr64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Or64 (Const64 [c]) (Const64 [d]))
	// result: (Const64 [c|d])
	for {
		// Or64 is commutative: the _i0 loop retries the match with the
		// two arguments swapped (same idiom throughout this function).
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(c | d)
			return true
		}
		break
	}
	// match: (Or64 <t> (Com64 x) (Com64 y))
	// result: (Com64 (And64 <t> x y))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom64 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpCom64 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpCom64)
			v0 := b.NewValue0(v.Pos, OpAnd64, t)
			v0.AddArg2(x, y)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (Or64 x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (Or64 (Const64 [0]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Or64 (Const64 [-1]) _)
	// result: (Const64 [-1])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(-1)
			return true
		}
		break
	}
	// match: (Or64 (Com64 x) x)
	// result: (Const64 [-1])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom64 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(-1)
			return true
		}
		break
	}
	// match: (Or64 x (Or64 x y))
	// result: (Or64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpOr64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpOr64)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (Or64 (And64 x (Const64 [c2])) (Const64 <t> [c1]))
	// cond: ^(c1 | c2) == 0
	// result: (Or64 (Const64 <t> [c1]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd64 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				if v_0_1.Op != OpConst64 {
					continue
				}
				c2 := auxIntToInt64(v_0_1.AuxInt)
				if v_1.Op != OpConst64 {
					continue
				}
				t := v_1.Type
				c1 := auxIntToInt64(v_1.AuxInt)
				if !(^(c1 | c2) == 0) {
					continue
				}
				v.reset(OpOr64)
				v0 := b.NewValue0(v.Pos, OpConst64, t)
				v0.AuxInt = int64ToAuxInt(c1)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Or64 (Or64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Or64 i (Or64 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpOr64 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst64 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst64 && x.Op != OpConst64) {
					continue
				}
				v.reset(OpOr64)
				v0 := b.NewValue0(v.Pos, OpOr64, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Or64 (Const64 <t> [c]) (Or64 (Const64 <t> [d]) x))
	// result: (Or64 (Const64 <t> [c|d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpOr64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt64(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpOr64)
				v0 := b.NewValue0(v.Pos, OpConst64, t)
				v0.AuxInt = int64ToAuxInt(c | d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Or64 (Lsh64x64 x z:(Const64 <t> [c])) (Rsh64Ux64 x (Const64 [d])))
	// cond: c < 64 && d == 64-c && canRotate(config, 64)
	// result: (RotateLeft64 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLsh64x64 {
				continue
			}
			_ = v_0.Args[1]
			x := v_0.Args[0]
			z := v_0.Args[1]
			if z.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(z.AuxInt)
			if v_1.Op != OpRsh64Ux64 {
				continue
			}
			_ = v_1.Args[1]
			if x != v_1.Args[0] {
				continue
			}
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1_1.AuxInt)
			if !(c < 64 && d == 64-c && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Or64 left:(Lsh64x64 x y) right:(Rsh64Ux64 x (Sub64 (Const64 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh64x64 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh64Ux64 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub64 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Or64 left:(Lsh64x32 x y) right:(Rsh64Ux32 x (Sub32 (Const32 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh64x32 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh64Ux32 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub32 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Or64 left:(Lsh64x16 x y) right:(Rsh64Ux16 x (Sub16 (Const16 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh64x16 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh64Ux16 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub16 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Or64 left:(Lsh64x8 x y) right:(Rsh64Ux8 x (Sub8 (Const8 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh64x8 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh64Ux8 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub8 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Or64 right:(Rsh64Ux64 x y) left:(Lsh64x64 x z:(Sub64 (Const64 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh64Ux64 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh64x64 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub64 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Or64 right:(Rsh64Ux32 x y) left:(Lsh64x32 x z:(Sub32 (Const32 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh64Ux32 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh64x32 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub32 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Or64 right:(Rsh64Ux16 x y) left:(Lsh64x16 x z:(Sub16 (Const16 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh64Ux16 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh64x16 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub16 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Or64 right:(Rsh64Ux8 x y) left:(Lsh64x8 x z:(Sub8 (Const8 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh64Ux8 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh64x8 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub8 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// No rule matched; v is left unchanged.
	return false
}
21763func rewriteValuegeneric_OpOr8(v *Value) bool {
21764	v_1 := v.Args[1]
21765	v_0 := v.Args[0]
21766	b := v.Block
21767	config := b.Func.Config
21768	// match: (Or8 (Const8 [c]) (Const8 [d]))
21769	// result: (Const8 [c|d])
21770	for {
21771		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21772			if v_0.Op != OpConst8 {
21773				continue
21774			}
21775			c := auxIntToInt8(v_0.AuxInt)
21776			if v_1.Op != OpConst8 {
21777				continue
21778			}
21779			d := auxIntToInt8(v_1.AuxInt)
21780			v.reset(OpConst8)
21781			v.AuxInt = int8ToAuxInt(c | d)
21782			return true
21783		}
21784		break
21785	}
21786	// match: (Or8 <t> (Com8 x) (Com8 y))
21787	// result: (Com8 (And8 <t> x y))
21788	for {
21789		t := v.Type
21790		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21791			if v_0.Op != OpCom8 {
21792				continue
21793			}
21794			x := v_0.Args[0]
21795			if v_1.Op != OpCom8 {
21796				continue
21797			}
21798			y := v_1.Args[0]
21799			v.reset(OpCom8)
21800			v0 := b.NewValue0(v.Pos, OpAnd8, t)
21801			v0.AddArg2(x, y)
21802			v.AddArg(v0)
21803			return true
21804		}
21805		break
21806	}
21807	// match: (Or8 x x)
21808	// result: x
21809	for {
21810		x := v_0
21811		if x != v_1 {
21812			break
21813		}
21814		v.copyOf(x)
21815		return true
21816	}
21817	// match: (Or8 (Const8 [0]) x)
21818	// result: x
21819	for {
21820		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21821			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
21822				continue
21823			}
21824			x := v_1
21825			v.copyOf(x)
21826			return true
21827		}
21828		break
21829	}
21830	// match: (Or8 (Const8 [-1]) _)
21831	// result: (Const8 [-1])
21832	for {
21833		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21834			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
21835				continue
21836			}
21837			v.reset(OpConst8)
21838			v.AuxInt = int8ToAuxInt(-1)
21839			return true
21840		}
21841		break
21842	}
21843	// match: (Or8 (Com8 x) x)
21844	// result: (Const8 [-1])
21845	for {
21846		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21847			if v_0.Op != OpCom8 {
21848				continue
21849			}
21850			x := v_0.Args[0]
21851			if x != v_1 {
21852				continue
21853			}
21854			v.reset(OpConst8)
21855			v.AuxInt = int8ToAuxInt(-1)
21856			return true
21857		}
21858		break
21859	}
21860	// match: (Or8 x (Or8 x y))
21861	// result: (Or8 x y)
21862	for {
21863		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21864			x := v_0
21865			if v_1.Op != OpOr8 {
21866				continue
21867			}
21868			_ = v_1.Args[1]
21869			v_1_0 := v_1.Args[0]
21870			v_1_1 := v_1.Args[1]
21871			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
21872				if x != v_1_0 {
21873					continue
21874				}
21875				y := v_1_1
21876				v.reset(OpOr8)
21877				v.AddArg2(x, y)
21878				return true
21879			}
21880		}
21881		break
21882	}
21883	// match: (Or8 (And8 x (Const8 [c2])) (Const8 <t> [c1]))
21884	// cond: ^(c1 | c2) == 0
21885	// result: (Or8 (Const8 <t> [c1]) x)
21886	for {
21887		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21888			if v_0.Op != OpAnd8 {
21889				continue
21890			}
21891			_ = v_0.Args[1]
21892			v_0_0 := v_0.Args[0]
21893			v_0_1 := v_0.Args[1]
21894			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
21895				x := v_0_0
21896				if v_0_1.Op != OpConst8 {
21897					continue
21898				}
21899				c2 := auxIntToInt8(v_0_1.AuxInt)
21900				if v_1.Op != OpConst8 {
21901					continue
21902				}
21903				t := v_1.Type
21904				c1 := auxIntToInt8(v_1.AuxInt)
21905				if !(^(c1 | c2) == 0) {
21906					continue
21907				}
21908				v.reset(OpOr8)
21909				v0 := b.NewValue0(v.Pos, OpConst8, t)
21910				v0.AuxInt = int8ToAuxInt(c1)
21911				v.AddArg2(v0, x)
21912				return true
21913			}
21914		}
21915		break
21916	}
21917	// match: (Or8 (Or8 i:(Const8 <t>) z) x)
21918	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
21919	// result: (Or8 i (Or8 <t> z x))
21920	for {
21921		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21922			if v_0.Op != OpOr8 {
21923				continue
21924			}
21925			_ = v_0.Args[1]
21926			v_0_0 := v_0.Args[0]
21927			v_0_1 := v_0.Args[1]
21928			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
21929				i := v_0_0
21930				if i.Op != OpConst8 {
21931					continue
21932				}
21933				t := i.Type
21934				z := v_0_1
21935				x := v_1
21936				if !(z.Op != OpConst8 && x.Op != OpConst8) {
21937					continue
21938				}
21939				v.reset(OpOr8)
21940				v0 := b.NewValue0(v.Pos, OpOr8, t)
21941				v0.AddArg2(z, x)
21942				v.AddArg2(i, v0)
21943				return true
21944			}
21945		}
21946		break
21947	}
21948	// match: (Or8 (Const8 <t> [c]) (Or8 (Const8 <t> [d]) x))
21949	// result: (Or8 (Const8 <t> [c|d]) x)
21950	for {
21951		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21952			if v_0.Op != OpConst8 {
21953				continue
21954			}
21955			t := v_0.Type
21956			c := auxIntToInt8(v_0.AuxInt)
21957			if v_1.Op != OpOr8 {
21958				continue
21959			}
21960			_ = v_1.Args[1]
21961			v_1_0 := v_1.Args[0]
21962			v_1_1 := v_1.Args[1]
21963			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
21964				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
21965					continue
21966				}
21967				d := auxIntToInt8(v_1_0.AuxInt)
21968				x := v_1_1
21969				v.reset(OpOr8)
21970				v0 := b.NewValue0(v.Pos, OpConst8, t)
21971				v0.AuxInt = int8ToAuxInt(c | d)
21972				v.AddArg2(v0, x)
21973				return true
21974			}
21975		}
21976		break
21977	}
21978	// match: (Or8 (Lsh8x64 x z:(Const64 <t> [c])) (Rsh8Ux64 x (Const64 [d])))
21979	// cond: c < 8 && d == 8-c && canRotate(config, 8)
21980	// result: (RotateLeft8 x z)
21981	for {
21982		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21983			if v_0.Op != OpLsh8x64 {
21984				continue
21985			}
21986			_ = v_0.Args[1]
21987			x := v_0.Args[0]
21988			z := v_0.Args[1]
21989			if z.Op != OpConst64 {
21990				continue
21991			}
21992			c := auxIntToInt64(z.AuxInt)
21993			if v_1.Op != OpRsh8Ux64 {
21994				continue
21995			}
21996			_ = v_1.Args[1]
21997			if x != v_1.Args[0] {
21998				continue
21999			}
22000			v_1_1 := v_1.Args[1]
22001			if v_1_1.Op != OpConst64 {
22002				continue
22003			}
22004			d := auxIntToInt64(v_1_1.AuxInt)
22005			if !(c < 8 && d == 8-c && canRotate(config, 8)) {
22006				continue
22007			}
22008			v.reset(OpRotateLeft8)
22009			v.AddArg2(x, z)
22010			return true
22011		}
22012		break
22013	}
22014	// match: (Or8 left:(Lsh8x64 x y) right:(Rsh8Ux64 x (Sub64 (Const64 [8]) y)))
22015	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
22016	// result: (RotateLeft8 x y)
22017	for {
22018		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22019			left := v_0
22020			if left.Op != OpLsh8x64 {
22021				continue
22022			}
22023			y := left.Args[1]
22024			x := left.Args[0]
22025			right := v_1
22026			if right.Op != OpRsh8Ux64 {
22027				continue
22028			}
22029			_ = right.Args[1]
22030			if x != right.Args[0] {
22031				continue
22032			}
22033			right_1 := right.Args[1]
22034			if right_1.Op != OpSub64 {
22035				continue
22036			}
22037			_ = right_1.Args[1]
22038			right_1_0 := right_1.Args[0]
22039			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
22040				continue
22041			}
22042			v.reset(OpRotateLeft8)
22043			v.AddArg2(x, y)
22044			return true
22045		}
22046		break
22047	}
22048	// match: (Or8 left:(Lsh8x32 x y) right:(Rsh8Ux32 x (Sub32 (Const32 [8]) y)))
22049	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
22050	// result: (RotateLeft8 x y)
22051	for {
22052		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22053			left := v_0
22054			if left.Op != OpLsh8x32 {
22055				continue
22056			}
22057			y := left.Args[1]
22058			x := left.Args[0]
22059			right := v_1
22060			if right.Op != OpRsh8Ux32 {
22061				continue
22062			}
22063			_ = right.Args[1]
22064			if x != right.Args[0] {
22065				continue
22066			}
22067			right_1 := right.Args[1]
22068			if right_1.Op != OpSub32 {
22069				continue
22070			}
22071			_ = right_1.Args[1]
22072			right_1_0 := right_1.Args[0]
22073			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
22074				continue
22075			}
22076			v.reset(OpRotateLeft8)
22077			v.AddArg2(x, y)
22078			return true
22079		}
22080		break
22081	}
22082	// match: (Or8 left:(Lsh8x16 x y) right:(Rsh8Ux16 x (Sub16 (Const16 [8]) y)))
22083	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
22084	// result: (RotateLeft8 x y)
22085	for {
22086		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22087			left := v_0
22088			if left.Op != OpLsh8x16 {
22089				continue
22090			}
22091			y := left.Args[1]
22092			x := left.Args[0]
22093			right := v_1
22094			if right.Op != OpRsh8Ux16 {
22095				continue
22096			}
22097			_ = right.Args[1]
22098			if x != right.Args[0] {
22099				continue
22100			}
22101			right_1 := right.Args[1]
22102			if right_1.Op != OpSub16 {
22103				continue
22104			}
22105			_ = right_1.Args[1]
22106			right_1_0 := right_1.Args[0]
22107			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
22108				continue
22109			}
22110			v.reset(OpRotateLeft8)
22111			v.AddArg2(x, y)
22112			return true
22113		}
22114		break
22115	}
22116	// match: (Or8 left:(Lsh8x8 x y) right:(Rsh8Ux8 x (Sub8 (Const8 [8]) y)))
22117	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
22118	// result: (RotateLeft8 x y)
22119	for {
22120		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22121			left := v_0
22122			if left.Op != OpLsh8x8 {
22123				continue
22124			}
22125			y := left.Args[1]
22126			x := left.Args[0]
22127			right := v_1
22128			if right.Op != OpRsh8Ux8 {
22129				continue
22130			}
22131			_ = right.Args[1]
22132			if x != right.Args[0] {
22133				continue
22134			}
22135			right_1 := right.Args[1]
22136			if right_1.Op != OpSub8 {
22137				continue
22138			}
22139			_ = right_1.Args[1]
22140			right_1_0 := right_1.Args[0]
22141			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
22142				continue
22143			}
22144			v.reset(OpRotateLeft8)
22145			v.AddArg2(x, y)
22146			return true
22147		}
22148		break
22149	}
22150	// match: (Or8 right:(Rsh8Ux64 x y) left:(Lsh8x64 x z:(Sub64 (Const64 [8]) y)))
22151	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
22152	// result: (RotateLeft8 x z)
22153	for {
22154		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22155			right := v_0
22156			if right.Op != OpRsh8Ux64 {
22157				continue
22158			}
22159			y := right.Args[1]
22160			x := right.Args[0]
22161			left := v_1
22162			if left.Op != OpLsh8x64 {
22163				continue
22164			}
22165			_ = left.Args[1]
22166			if x != left.Args[0] {
22167				continue
22168			}
22169			z := left.Args[1]
22170			if z.Op != OpSub64 {
22171				continue
22172			}
22173			_ = z.Args[1]
22174			z_0 := z.Args[0]
22175			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
22176				continue
22177			}
22178			v.reset(OpRotateLeft8)
22179			v.AddArg2(x, z)
22180			return true
22181		}
22182		break
22183	}
22184	// match: (Or8 right:(Rsh8Ux32 x y) left:(Lsh8x32 x z:(Sub32 (Const32 [8]) y)))
22185	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
22186	// result: (RotateLeft8 x z)
22187	for {
22188		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22189			right := v_0
22190			if right.Op != OpRsh8Ux32 {
22191				continue
22192			}
22193			y := right.Args[1]
22194			x := right.Args[0]
22195			left := v_1
22196			if left.Op != OpLsh8x32 {
22197				continue
22198			}
22199			_ = left.Args[1]
22200			if x != left.Args[0] {
22201				continue
22202			}
22203			z := left.Args[1]
22204			if z.Op != OpSub32 {
22205				continue
22206			}
22207			_ = z.Args[1]
22208			z_0 := z.Args[0]
22209			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
22210				continue
22211			}
22212			v.reset(OpRotateLeft8)
22213			v.AddArg2(x, z)
22214			return true
22215		}
22216		break
22217	}
22218	// match: (Or8 right:(Rsh8Ux16 x y) left:(Lsh8x16 x z:(Sub16 (Const16 [8]) y)))
22219	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
22220	// result: (RotateLeft8 x z)
22221	for {
22222		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22223			right := v_0
22224			if right.Op != OpRsh8Ux16 {
22225				continue
22226			}
22227			y := right.Args[1]
22228			x := right.Args[0]
22229			left := v_1
22230			if left.Op != OpLsh8x16 {
22231				continue
22232			}
22233			_ = left.Args[1]
22234			if x != left.Args[0] {
22235				continue
22236			}
22237			z := left.Args[1]
22238			if z.Op != OpSub16 {
22239				continue
22240			}
22241			_ = z.Args[1]
22242			z_0 := z.Args[0]
22243			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
22244				continue
22245			}
22246			v.reset(OpRotateLeft8)
22247			v.AddArg2(x, z)
22248			return true
22249		}
22250		break
22251	}
22252	// match: (Or8 right:(Rsh8Ux8 x y) left:(Lsh8x8 x z:(Sub8 (Const8 [8]) y)))
22253	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
22254	// result: (RotateLeft8 x z)
22255	for {
22256		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22257			right := v_0
22258			if right.Op != OpRsh8Ux8 {
22259				continue
22260			}
22261			y := right.Args[1]
22262			x := right.Args[0]
22263			left := v_1
22264			if left.Op != OpLsh8x8 {
22265				continue
22266			}
22267			_ = left.Args[1]
22268			if x != left.Args[0] {
22269				continue
22270			}
22271			z := left.Args[1]
22272			if z.Op != OpSub8 {
22273				continue
22274			}
22275			_ = z.Args[1]
22276			z_0 := z.Args[0]
22277			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
22278				continue
22279			}
22280			v.reset(OpRotateLeft8)
22281			v.AddArg2(x, z)
22282			return true
22283		}
22284		break
22285	}
22286	return false
22287}
22288func rewriteValuegeneric_OpOrB(v *Value) bool {
22289	v_1 := v.Args[1]
22290	v_0 := v.Args[0]
22291	b := v.Block
22292	// match: (OrB (Less64 (Const64 [c]) x) (Less64 x (Const64 [d])))
22293	// cond: c >= d
22294	// result: (Less64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
22295	for {
22296		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22297			if v_0.Op != OpLess64 {
22298				continue
22299			}
22300			x := v_0.Args[1]
22301			v_0_0 := v_0.Args[0]
22302			if v_0_0.Op != OpConst64 {
22303				continue
22304			}
22305			c := auxIntToInt64(v_0_0.AuxInt)
22306			if v_1.Op != OpLess64 {
22307				continue
22308			}
22309			_ = v_1.Args[1]
22310			if x != v_1.Args[0] {
22311				continue
22312			}
22313			v_1_1 := v_1.Args[1]
22314			if v_1_1.Op != OpConst64 {
22315				continue
22316			}
22317			d := auxIntToInt64(v_1_1.AuxInt)
22318			if !(c >= d) {
22319				continue
22320			}
22321			v.reset(OpLess64U)
22322			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
22323			v0.AuxInt = int64ToAuxInt(c - d)
22324			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
22325			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
22326			v2.AuxInt = int64ToAuxInt(d)
22327			v1.AddArg2(x, v2)
22328			v.AddArg2(v0, v1)
22329			return true
22330		}
22331		break
22332	}
22333	// match: (OrB (Leq64 (Const64 [c]) x) (Less64 x (Const64 [d])))
22334	// cond: c >= d
22335	// result: (Leq64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
22336	for {
22337		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22338			if v_0.Op != OpLeq64 {
22339				continue
22340			}
22341			x := v_0.Args[1]
22342			v_0_0 := v_0.Args[0]
22343			if v_0_0.Op != OpConst64 {
22344				continue
22345			}
22346			c := auxIntToInt64(v_0_0.AuxInt)
22347			if v_1.Op != OpLess64 {
22348				continue
22349			}
22350			_ = v_1.Args[1]
22351			if x != v_1.Args[0] {
22352				continue
22353			}
22354			v_1_1 := v_1.Args[1]
22355			if v_1_1.Op != OpConst64 {
22356				continue
22357			}
22358			d := auxIntToInt64(v_1_1.AuxInt)
22359			if !(c >= d) {
22360				continue
22361			}
22362			v.reset(OpLeq64U)
22363			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
22364			v0.AuxInt = int64ToAuxInt(c - d)
22365			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
22366			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
22367			v2.AuxInt = int64ToAuxInt(d)
22368			v1.AddArg2(x, v2)
22369			v.AddArg2(v0, v1)
22370			return true
22371		}
22372		break
22373	}
22374	// match: (OrB (Less32 (Const32 [c]) x) (Less32 x (Const32 [d])))
22375	// cond: c >= d
22376	// result: (Less32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
22377	for {
22378		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22379			if v_0.Op != OpLess32 {
22380				continue
22381			}
22382			x := v_0.Args[1]
22383			v_0_0 := v_0.Args[0]
22384			if v_0_0.Op != OpConst32 {
22385				continue
22386			}
22387			c := auxIntToInt32(v_0_0.AuxInt)
22388			if v_1.Op != OpLess32 {
22389				continue
22390			}
22391			_ = v_1.Args[1]
22392			if x != v_1.Args[0] {
22393				continue
22394			}
22395			v_1_1 := v_1.Args[1]
22396			if v_1_1.Op != OpConst32 {
22397				continue
22398			}
22399			d := auxIntToInt32(v_1_1.AuxInt)
22400			if !(c >= d) {
22401				continue
22402			}
22403			v.reset(OpLess32U)
22404			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
22405			v0.AuxInt = int32ToAuxInt(c - d)
22406			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
22407			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
22408			v2.AuxInt = int32ToAuxInt(d)
22409			v1.AddArg2(x, v2)
22410			v.AddArg2(v0, v1)
22411			return true
22412		}
22413		break
22414	}
22415	// match: (OrB (Leq32 (Const32 [c]) x) (Less32 x (Const32 [d])))
22416	// cond: c >= d
22417	// result: (Leq32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
22418	for {
22419		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22420			if v_0.Op != OpLeq32 {
22421				continue
22422			}
22423			x := v_0.Args[1]
22424			v_0_0 := v_0.Args[0]
22425			if v_0_0.Op != OpConst32 {
22426				continue
22427			}
22428			c := auxIntToInt32(v_0_0.AuxInt)
22429			if v_1.Op != OpLess32 {
22430				continue
22431			}
22432			_ = v_1.Args[1]
22433			if x != v_1.Args[0] {
22434				continue
22435			}
22436			v_1_1 := v_1.Args[1]
22437			if v_1_1.Op != OpConst32 {
22438				continue
22439			}
22440			d := auxIntToInt32(v_1_1.AuxInt)
22441			if !(c >= d) {
22442				continue
22443			}
22444			v.reset(OpLeq32U)
22445			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
22446			v0.AuxInt = int32ToAuxInt(c - d)
22447			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
22448			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
22449			v2.AuxInt = int32ToAuxInt(d)
22450			v1.AddArg2(x, v2)
22451			v.AddArg2(v0, v1)
22452			return true
22453		}
22454		break
22455	}
22456	// match: (OrB (Less16 (Const16 [c]) x) (Less16 x (Const16 [d])))
22457	// cond: c >= d
22458	// result: (Less16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
22459	for {
22460		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22461			if v_0.Op != OpLess16 {
22462				continue
22463			}
22464			x := v_0.Args[1]
22465			v_0_0 := v_0.Args[0]
22466			if v_0_0.Op != OpConst16 {
22467				continue
22468			}
22469			c := auxIntToInt16(v_0_0.AuxInt)
22470			if v_1.Op != OpLess16 {
22471				continue
22472			}
22473			_ = v_1.Args[1]
22474			if x != v_1.Args[0] {
22475				continue
22476			}
22477			v_1_1 := v_1.Args[1]
22478			if v_1_1.Op != OpConst16 {
22479				continue
22480			}
22481			d := auxIntToInt16(v_1_1.AuxInt)
22482			if !(c >= d) {
22483				continue
22484			}
22485			v.reset(OpLess16U)
22486			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
22487			v0.AuxInt = int16ToAuxInt(c - d)
22488			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
22489			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
22490			v2.AuxInt = int16ToAuxInt(d)
22491			v1.AddArg2(x, v2)
22492			v.AddArg2(v0, v1)
22493			return true
22494		}
22495		break
22496	}
22497	// match: (OrB (Leq16 (Const16 [c]) x) (Less16 x (Const16 [d])))
22498	// cond: c >= d
22499	// result: (Leq16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
22500	for {
22501		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22502			if v_0.Op != OpLeq16 {
22503				continue
22504			}
22505			x := v_0.Args[1]
22506			v_0_0 := v_0.Args[0]
22507			if v_0_0.Op != OpConst16 {
22508				continue
22509			}
22510			c := auxIntToInt16(v_0_0.AuxInt)
22511			if v_1.Op != OpLess16 {
22512				continue
22513			}
22514			_ = v_1.Args[1]
22515			if x != v_1.Args[0] {
22516				continue
22517			}
22518			v_1_1 := v_1.Args[1]
22519			if v_1_1.Op != OpConst16 {
22520				continue
22521			}
22522			d := auxIntToInt16(v_1_1.AuxInt)
22523			if !(c >= d) {
22524				continue
22525			}
22526			v.reset(OpLeq16U)
22527			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
22528			v0.AuxInt = int16ToAuxInt(c - d)
22529			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
22530			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
22531			v2.AuxInt = int16ToAuxInt(d)
22532			v1.AddArg2(x, v2)
22533			v.AddArg2(v0, v1)
22534			return true
22535		}
22536		break
22537	}
22538	// match: (OrB (Less8 (Const8 [c]) x) (Less8 x (Const8 [d])))
22539	// cond: c >= d
22540	// result: (Less8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
22541	for {
22542		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22543			if v_0.Op != OpLess8 {
22544				continue
22545			}
22546			x := v_0.Args[1]
22547			v_0_0 := v_0.Args[0]
22548			if v_0_0.Op != OpConst8 {
22549				continue
22550			}
22551			c := auxIntToInt8(v_0_0.AuxInt)
22552			if v_1.Op != OpLess8 {
22553				continue
22554			}
22555			_ = v_1.Args[1]
22556			if x != v_1.Args[0] {
22557				continue
22558			}
22559			v_1_1 := v_1.Args[1]
22560			if v_1_1.Op != OpConst8 {
22561				continue
22562			}
22563			d := auxIntToInt8(v_1_1.AuxInt)
22564			if !(c >= d) {
22565				continue
22566			}
22567			v.reset(OpLess8U)
22568			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
22569			v0.AuxInt = int8ToAuxInt(c - d)
22570			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
22571			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
22572			v2.AuxInt = int8ToAuxInt(d)
22573			v1.AddArg2(x, v2)
22574			v.AddArg2(v0, v1)
22575			return true
22576		}
22577		break
22578	}
22579	// match: (OrB (Leq8 (Const8 [c]) x) (Less8 x (Const8 [d])))
22580	// cond: c >= d
22581	// result: (Leq8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
22582	for {
22583		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22584			if v_0.Op != OpLeq8 {
22585				continue
22586			}
22587			x := v_0.Args[1]
22588			v_0_0 := v_0.Args[0]
22589			if v_0_0.Op != OpConst8 {
22590				continue
22591			}
22592			c := auxIntToInt8(v_0_0.AuxInt)
22593			if v_1.Op != OpLess8 {
22594				continue
22595			}
22596			_ = v_1.Args[1]
22597			if x != v_1.Args[0] {
22598				continue
22599			}
22600			v_1_1 := v_1.Args[1]
22601			if v_1_1.Op != OpConst8 {
22602				continue
22603			}
22604			d := auxIntToInt8(v_1_1.AuxInt)
22605			if !(c >= d) {
22606				continue
22607			}
22608			v.reset(OpLeq8U)
22609			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
22610			v0.AuxInt = int8ToAuxInt(c - d)
22611			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
22612			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
22613			v2.AuxInt = int8ToAuxInt(d)
22614			v1.AddArg2(x, v2)
22615			v.AddArg2(v0, v1)
22616			return true
22617		}
22618		break
22619	}
22620	// match: (OrB (Less64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
22621	// cond: c >= d+1 && d+1 > d
22622	// result: (Less64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
22623	for {
22624		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22625			if v_0.Op != OpLess64 {
22626				continue
22627			}
22628			x := v_0.Args[1]
22629			v_0_0 := v_0.Args[0]
22630			if v_0_0.Op != OpConst64 {
22631				continue
22632			}
22633			c := auxIntToInt64(v_0_0.AuxInt)
22634			if v_1.Op != OpLeq64 {
22635				continue
22636			}
22637			_ = v_1.Args[1]
22638			if x != v_1.Args[0] {
22639				continue
22640			}
22641			v_1_1 := v_1.Args[1]
22642			if v_1_1.Op != OpConst64 {
22643				continue
22644			}
22645			d := auxIntToInt64(v_1_1.AuxInt)
22646			if !(c >= d+1 && d+1 > d) {
22647				continue
22648			}
22649			v.reset(OpLess64U)
22650			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
22651			v0.AuxInt = int64ToAuxInt(c - d - 1)
22652			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
22653			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
22654			v2.AuxInt = int64ToAuxInt(d + 1)
22655			v1.AddArg2(x, v2)
22656			v.AddArg2(v0, v1)
22657			return true
22658		}
22659		break
22660	}
22661	// match: (OrB (Leq64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
22662	// cond: c >= d+1 && d+1 > d
22663	// result: (Leq64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
22664	for {
22665		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22666			if v_0.Op != OpLeq64 {
22667				continue
22668			}
22669			x := v_0.Args[1]
22670			v_0_0 := v_0.Args[0]
22671			if v_0_0.Op != OpConst64 {
22672				continue
22673			}
22674			c := auxIntToInt64(v_0_0.AuxInt)
22675			if v_1.Op != OpLeq64 {
22676				continue
22677			}
22678			_ = v_1.Args[1]
22679			if x != v_1.Args[0] {
22680				continue
22681			}
22682			v_1_1 := v_1.Args[1]
22683			if v_1_1.Op != OpConst64 {
22684				continue
22685			}
22686			d := auxIntToInt64(v_1_1.AuxInt)
22687			if !(c >= d+1 && d+1 > d) {
22688				continue
22689			}
22690			v.reset(OpLeq64U)
22691			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
22692			v0.AuxInt = int64ToAuxInt(c - d - 1)
22693			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
22694			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
22695			v2.AuxInt = int64ToAuxInt(d + 1)
22696			v1.AddArg2(x, v2)
22697			v.AddArg2(v0, v1)
22698			return true
22699		}
22700		break
22701	}
22702	// match: (OrB (Less32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
22703	// cond: c >= d+1 && d+1 > d
22704	// result: (Less32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
22705	for {
22706		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22707			if v_0.Op != OpLess32 {
22708				continue
22709			}
22710			x := v_0.Args[1]
22711			v_0_0 := v_0.Args[0]
22712			if v_0_0.Op != OpConst32 {
22713				continue
22714			}
22715			c := auxIntToInt32(v_0_0.AuxInt)
22716			if v_1.Op != OpLeq32 {
22717				continue
22718			}
22719			_ = v_1.Args[1]
22720			if x != v_1.Args[0] {
22721				continue
22722			}
22723			v_1_1 := v_1.Args[1]
22724			if v_1_1.Op != OpConst32 {
22725				continue
22726			}
22727			d := auxIntToInt32(v_1_1.AuxInt)
22728			if !(c >= d+1 && d+1 > d) {
22729				continue
22730			}
22731			v.reset(OpLess32U)
22732			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
22733			v0.AuxInt = int32ToAuxInt(c - d - 1)
22734			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
22735			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
22736			v2.AuxInt = int32ToAuxInt(d + 1)
22737			v1.AddArg2(x, v2)
22738			v.AddArg2(v0, v1)
22739			return true
22740		}
22741		break
22742	}
22743	// match: (OrB (Leq32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
22744	// cond: c >= d+1 && d+1 > d
22745	// result: (Leq32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
22746	for {
22747		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22748			if v_0.Op != OpLeq32 {
22749				continue
22750			}
22751			x := v_0.Args[1]
22752			v_0_0 := v_0.Args[0]
22753			if v_0_0.Op != OpConst32 {
22754				continue
22755			}
22756			c := auxIntToInt32(v_0_0.AuxInt)
22757			if v_1.Op != OpLeq32 {
22758				continue
22759			}
22760			_ = v_1.Args[1]
22761			if x != v_1.Args[0] {
22762				continue
22763			}
22764			v_1_1 := v_1.Args[1]
22765			if v_1_1.Op != OpConst32 {
22766				continue
22767			}
22768			d := auxIntToInt32(v_1_1.AuxInt)
22769			if !(c >= d+1 && d+1 > d) {
22770				continue
22771			}
22772			v.reset(OpLeq32U)
22773			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
22774			v0.AuxInt = int32ToAuxInt(c - d - 1)
22775			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
22776			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
22777			v2.AuxInt = int32ToAuxInt(d + 1)
22778			v1.AddArg2(x, v2)
22779			v.AddArg2(v0, v1)
22780			return true
22781		}
22782		break
22783	}
22784	// match: (OrB (Less16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
22785	// cond: c >= d+1 && d+1 > d
22786	// result: (Less16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
22787	for {
22788		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22789			if v_0.Op != OpLess16 {
22790				continue
22791			}
22792			x := v_0.Args[1]
22793			v_0_0 := v_0.Args[0]
22794			if v_0_0.Op != OpConst16 {
22795				continue
22796			}
22797			c := auxIntToInt16(v_0_0.AuxInt)
22798			if v_1.Op != OpLeq16 {
22799				continue
22800			}
22801			_ = v_1.Args[1]
22802			if x != v_1.Args[0] {
22803				continue
22804			}
22805			v_1_1 := v_1.Args[1]
22806			if v_1_1.Op != OpConst16 {
22807				continue
22808			}
22809			d := auxIntToInt16(v_1_1.AuxInt)
22810			if !(c >= d+1 && d+1 > d) {
22811				continue
22812			}
22813			v.reset(OpLess16U)
22814			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
22815			v0.AuxInt = int16ToAuxInt(c - d - 1)
22816			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
22817			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
22818			v2.AuxInt = int16ToAuxInt(d + 1)
22819			v1.AddArg2(x, v2)
22820			v.AddArg2(v0, v1)
22821			return true
22822		}
22823		break
22824	}
22825	// match: (OrB (Leq16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
22826	// cond: c >= d+1 && d+1 > d
22827	// result: (Leq16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
22828	for {
22829		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22830			if v_0.Op != OpLeq16 {
22831				continue
22832			}
22833			x := v_0.Args[1]
22834			v_0_0 := v_0.Args[0]
22835			if v_0_0.Op != OpConst16 {
22836				continue
22837			}
22838			c := auxIntToInt16(v_0_0.AuxInt)
22839			if v_1.Op != OpLeq16 {
22840				continue
22841			}
22842			_ = v_1.Args[1]
22843			if x != v_1.Args[0] {
22844				continue
22845			}
22846			v_1_1 := v_1.Args[1]
22847			if v_1_1.Op != OpConst16 {
22848				continue
22849			}
22850			d := auxIntToInt16(v_1_1.AuxInt)
22851			if !(c >= d+1 && d+1 > d) {
22852				continue
22853			}
22854			v.reset(OpLeq16U)
22855			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
22856			v0.AuxInt = int16ToAuxInt(c - d - 1)
22857			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
22858			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
22859			v2.AuxInt = int16ToAuxInt(d + 1)
22860			v1.AddArg2(x, v2)
22861			v.AddArg2(v0, v1)
22862			return true
22863		}
22864		break
22865	}
22866	// match: (OrB (Less8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
22867	// cond: c >= d+1 && d+1 > d
22868	// result: (Less8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
22869	for {
22870		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22871			if v_0.Op != OpLess8 {
22872				continue
22873			}
22874			x := v_0.Args[1]
22875			v_0_0 := v_0.Args[0]
22876			if v_0_0.Op != OpConst8 {
22877				continue
22878			}
22879			c := auxIntToInt8(v_0_0.AuxInt)
22880			if v_1.Op != OpLeq8 {
22881				continue
22882			}
22883			_ = v_1.Args[1]
22884			if x != v_1.Args[0] {
22885				continue
22886			}
22887			v_1_1 := v_1.Args[1]
22888			if v_1_1.Op != OpConst8 {
22889				continue
22890			}
22891			d := auxIntToInt8(v_1_1.AuxInt)
22892			if !(c >= d+1 && d+1 > d) {
22893				continue
22894			}
22895			v.reset(OpLess8U)
22896			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
22897			v0.AuxInt = int8ToAuxInt(c - d - 1)
22898			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
22899			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
22900			v2.AuxInt = int8ToAuxInt(d + 1)
22901			v1.AddArg2(x, v2)
22902			v.AddArg2(v0, v1)
22903			return true
22904		}
22905		break
22906	}
22907	// match: (OrB (Leq8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
22908	// cond: c >= d+1 && d+1 > d
22909	// result: (Leq8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
22910	for {
22911		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22912			if v_0.Op != OpLeq8 {
22913				continue
22914			}
22915			x := v_0.Args[1]
22916			v_0_0 := v_0.Args[0]
22917			if v_0_0.Op != OpConst8 {
22918				continue
22919			}
22920			c := auxIntToInt8(v_0_0.AuxInt)
22921			if v_1.Op != OpLeq8 {
22922				continue
22923			}
22924			_ = v_1.Args[1]
22925			if x != v_1.Args[0] {
22926				continue
22927			}
22928			v_1_1 := v_1.Args[1]
22929			if v_1_1.Op != OpConst8 {
22930				continue
22931			}
22932			d := auxIntToInt8(v_1_1.AuxInt)
22933			if !(c >= d+1 && d+1 > d) {
22934				continue
22935			}
22936			v.reset(OpLeq8U)
22937			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
22938			v0.AuxInt = int8ToAuxInt(c - d - 1)
22939			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
22940			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
22941			v2.AuxInt = int8ToAuxInt(d + 1)
22942			v1.AddArg2(x, v2)
22943			v.AddArg2(v0, v1)
22944			return true
22945		}
22946		break
22947	}
22948	// match: (OrB (Less64U (Const64 [c]) x) (Less64U x (Const64 [d])))
22949	// cond: uint64(c) >= uint64(d)
22950	// result: (Less64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
22951	for {
22952		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22953			if v_0.Op != OpLess64U {
22954				continue
22955			}
22956			x := v_0.Args[1]
22957			v_0_0 := v_0.Args[0]
22958			if v_0_0.Op != OpConst64 {
22959				continue
22960			}
22961			c := auxIntToInt64(v_0_0.AuxInt)
22962			if v_1.Op != OpLess64U {
22963				continue
22964			}
22965			_ = v_1.Args[1]
22966			if x != v_1.Args[0] {
22967				continue
22968			}
22969			v_1_1 := v_1.Args[1]
22970			if v_1_1.Op != OpConst64 {
22971				continue
22972			}
22973			d := auxIntToInt64(v_1_1.AuxInt)
22974			if !(uint64(c) >= uint64(d)) {
22975				continue
22976			}
22977			v.reset(OpLess64U)
22978			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
22979			v0.AuxInt = int64ToAuxInt(c - d)
22980			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
22981			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
22982			v2.AuxInt = int64ToAuxInt(d)
22983			v1.AddArg2(x, v2)
22984			v.AddArg2(v0, v1)
22985			return true
22986		}
22987		break
22988	}
22989	// match: (OrB (Leq64U (Const64 [c]) x) (Less64U x (Const64 [d])))
22990	// cond: uint64(c) >= uint64(d)
22991	// result: (Leq64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
22992	for {
22993		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22994			if v_0.Op != OpLeq64U {
22995				continue
22996			}
22997			x := v_0.Args[1]
22998			v_0_0 := v_0.Args[0]
22999			if v_0_0.Op != OpConst64 {
23000				continue
23001			}
23002			c := auxIntToInt64(v_0_0.AuxInt)
23003			if v_1.Op != OpLess64U {
23004				continue
23005			}
23006			_ = v_1.Args[1]
23007			if x != v_1.Args[0] {
23008				continue
23009			}
23010			v_1_1 := v_1.Args[1]
23011			if v_1_1.Op != OpConst64 {
23012				continue
23013			}
23014			d := auxIntToInt64(v_1_1.AuxInt)
23015			if !(uint64(c) >= uint64(d)) {
23016				continue
23017			}
23018			v.reset(OpLeq64U)
23019			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
23020			v0.AuxInt = int64ToAuxInt(c - d)
23021			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
23022			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
23023			v2.AuxInt = int64ToAuxInt(d)
23024			v1.AddArg2(x, v2)
23025			v.AddArg2(v0, v1)
23026			return true
23027		}
23028		break
23029	}
23030	// match: (OrB (Less32U (Const32 [c]) x) (Less32U x (Const32 [d])))
23031	// cond: uint32(c) >= uint32(d)
23032	// result: (Less32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
23033	for {
23034		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
23035			if v_0.Op != OpLess32U {
23036				continue
23037			}
23038			x := v_0.Args[1]
23039			v_0_0 := v_0.Args[0]
23040			if v_0_0.Op != OpConst32 {
23041				continue
23042			}
23043			c := auxIntToInt32(v_0_0.AuxInt)
23044			if v_1.Op != OpLess32U {
23045				continue
23046			}
23047			_ = v_1.Args[1]
23048			if x != v_1.Args[0] {
23049				continue
23050			}
23051			v_1_1 := v_1.Args[1]
23052			if v_1_1.Op != OpConst32 {
23053				continue
23054			}
23055			d := auxIntToInt32(v_1_1.AuxInt)
23056			if !(uint32(c) >= uint32(d)) {
23057				continue
23058			}
23059			v.reset(OpLess32U)
23060			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
23061			v0.AuxInt = int32ToAuxInt(c - d)
23062			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
23063			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
23064			v2.AuxInt = int32ToAuxInt(d)
23065			v1.AddArg2(x, v2)
23066			v.AddArg2(v0, v1)
23067			return true
23068		}
23069		break
23070	}
23071	// match: (OrB (Leq32U (Const32 [c]) x) (Less32U x (Const32 [d])))
23072	// cond: uint32(c) >= uint32(d)
23073	// result: (Leq32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
23074	for {
23075		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
23076			if v_0.Op != OpLeq32U {
23077				continue
23078			}
23079			x := v_0.Args[1]
23080			v_0_0 := v_0.Args[0]
23081			if v_0_0.Op != OpConst32 {
23082				continue
23083			}
23084			c := auxIntToInt32(v_0_0.AuxInt)
23085			if v_1.Op != OpLess32U {
23086				continue
23087			}
23088			_ = v_1.Args[1]
23089			if x != v_1.Args[0] {
23090				continue
23091			}
23092			v_1_1 := v_1.Args[1]
23093			if v_1_1.Op != OpConst32 {
23094				continue
23095			}
23096			d := auxIntToInt32(v_1_1.AuxInt)
23097			if !(uint32(c) >= uint32(d)) {
23098				continue
23099			}
23100			v.reset(OpLeq32U)
23101			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
23102			v0.AuxInt = int32ToAuxInt(c - d)
23103			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
23104			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
23105			v2.AuxInt = int32ToAuxInt(d)
23106			v1.AddArg2(x, v2)
23107			v.AddArg2(v0, v1)
23108			return true
23109		}
23110		break
23111	}
23112	// match: (OrB (Less16U (Const16 [c]) x) (Less16U x (Const16 [d])))
23113	// cond: uint16(c) >= uint16(d)
23114	// result: (Less16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
23115	for {
23116		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
23117			if v_0.Op != OpLess16U {
23118				continue
23119			}
23120			x := v_0.Args[1]
23121			v_0_0 := v_0.Args[0]
23122			if v_0_0.Op != OpConst16 {
23123				continue
23124			}
23125			c := auxIntToInt16(v_0_0.AuxInt)
23126			if v_1.Op != OpLess16U {
23127				continue
23128			}
23129			_ = v_1.Args[1]
23130			if x != v_1.Args[0] {
23131				continue
23132			}
23133			v_1_1 := v_1.Args[1]
23134			if v_1_1.Op != OpConst16 {
23135				continue
23136			}
23137			d := auxIntToInt16(v_1_1.AuxInt)
23138			if !(uint16(c) >= uint16(d)) {
23139				continue
23140			}
23141			v.reset(OpLess16U)
23142			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
23143			v0.AuxInt = int16ToAuxInt(c - d)
23144			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
23145			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
23146			v2.AuxInt = int16ToAuxInt(d)
23147			v1.AddArg2(x, v2)
23148			v.AddArg2(v0, v1)
23149			return true
23150		}
23151		break
23152	}
23153	// match: (OrB (Leq16U (Const16 [c]) x) (Less16U x (Const16 [d])))
23154	// cond: uint16(c) >= uint16(d)
23155	// result: (Leq16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
23156	for {
23157		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
23158			if v_0.Op != OpLeq16U {
23159				continue
23160			}
23161			x := v_0.Args[1]
23162			v_0_0 := v_0.Args[0]
23163			if v_0_0.Op != OpConst16 {
23164				continue
23165			}
23166			c := auxIntToInt16(v_0_0.AuxInt)
23167			if v_1.Op != OpLess16U {
23168				continue
23169			}
23170			_ = v_1.Args[1]
23171			if x != v_1.Args[0] {
23172				continue
23173			}
23174			v_1_1 := v_1.Args[1]
23175			if v_1_1.Op != OpConst16 {
23176				continue
23177			}
23178			d := auxIntToInt16(v_1_1.AuxInt)
23179			if !(uint16(c) >= uint16(d)) {
23180				continue
23181			}
23182			v.reset(OpLeq16U)
23183			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
23184			v0.AuxInt = int16ToAuxInt(c - d)
23185			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
23186			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
23187			v2.AuxInt = int16ToAuxInt(d)
23188			v1.AddArg2(x, v2)
23189			v.AddArg2(v0, v1)
23190			return true
23191		}
23192		break
23193	}
23194	// match: (OrB (Less8U (Const8 [c]) x) (Less8U x (Const8 [d])))
23195	// cond: uint8(c) >= uint8(d)
23196	// result: (Less8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
23197	for {
23198		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
23199			if v_0.Op != OpLess8U {
23200				continue
23201			}
23202			x := v_0.Args[1]
23203			v_0_0 := v_0.Args[0]
23204			if v_0_0.Op != OpConst8 {
23205				continue
23206			}
23207			c := auxIntToInt8(v_0_0.AuxInt)
23208			if v_1.Op != OpLess8U {
23209				continue
23210			}
23211			_ = v_1.Args[1]
23212			if x != v_1.Args[0] {
23213				continue
23214			}
23215			v_1_1 := v_1.Args[1]
23216			if v_1_1.Op != OpConst8 {
23217				continue
23218			}
23219			d := auxIntToInt8(v_1_1.AuxInt)
23220			if !(uint8(c) >= uint8(d)) {
23221				continue
23222			}
23223			v.reset(OpLess8U)
23224			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
23225			v0.AuxInt = int8ToAuxInt(c - d)
23226			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
23227			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
23228			v2.AuxInt = int8ToAuxInt(d)
23229			v1.AddArg2(x, v2)
23230			v.AddArg2(v0, v1)
23231			return true
23232		}
23233		break
23234	}
23235	// match: (OrB (Leq8U (Const8 [c]) x) (Less8U x (Const8 [d])))
23236	// cond: uint8(c) >= uint8(d)
23237	// result: (Leq8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
23238	for {
23239		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
23240			if v_0.Op != OpLeq8U {
23241				continue
23242			}
23243			x := v_0.Args[1]
23244			v_0_0 := v_0.Args[0]
23245			if v_0_0.Op != OpConst8 {
23246				continue
23247			}
23248			c := auxIntToInt8(v_0_0.AuxInt)
23249			if v_1.Op != OpLess8U {
23250				continue
23251			}
23252			_ = v_1.Args[1]
23253			if x != v_1.Args[0] {
23254				continue
23255			}
23256			v_1_1 := v_1.Args[1]
23257			if v_1_1.Op != OpConst8 {
23258				continue
23259			}
23260			d := auxIntToInt8(v_1_1.AuxInt)
23261			if !(uint8(c) >= uint8(d)) {
23262				continue
23263			}
23264			v.reset(OpLeq8U)
23265			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
23266			v0.AuxInt = int8ToAuxInt(c - d)
23267			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
23268			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
23269			v2.AuxInt = int8ToAuxInt(d)
23270			v1.AddArg2(x, v2)
23271			v.AddArg2(v0, v1)
23272			return true
23273		}
23274		break
23275	}
23276	// match: (OrB (Less64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
23277	// cond: uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)
23278	// result: (Less64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
23279	for {
23280		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
23281			if v_0.Op != OpLess64U {
23282				continue
23283			}
23284			x := v_0.Args[1]
23285			v_0_0 := v_0.Args[0]
23286			if v_0_0.Op != OpConst64 {
23287				continue
23288			}
23289			c := auxIntToInt64(v_0_0.AuxInt)
23290			if v_1.Op != OpLeq64U {
23291				continue
23292			}
23293			_ = v_1.Args[1]
23294			if x != v_1.Args[0] {
23295				continue
23296			}
23297			v_1_1 := v_1.Args[1]
23298			if v_1_1.Op != OpConst64 {
23299				continue
23300			}
23301			d := auxIntToInt64(v_1_1.AuxInt)
23302			if !(uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)) {
23303				continue
23304			}
23305			v.reset(OpLess64U)
23306			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
23307			v0.AuxInt = int64ToAuxInt(c - d - 1)
23308			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
23309			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
23310			v2.AuxInt = int64ToAuxInt(d + 1)
23311			v1.AddArg2(x, v2)
23312			v.AddArg2(v0, v1)
23313			return true
23314		}
23315		break
23316	}
23317	// match: (OrB (Leq64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
23318	// cond: uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)
23319	// result: (Leq64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
23320	for {
23321		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
23322			if v_0.Op != OpLeq64U {
23323				continue
23324			}
23325			x := v_0.Args[1]
23326			v_0_0 := v_0.Args[0]
23327			if v_0_0.Op != OpConst64 {
23328				continue
23329			}
23330			c := auxIntToInt64(v_0_0.AuxInt)
23331			if v_1.Op != OpLeq64U {
23332				continue
23333			}
23334			_ = v_1.Args[1]
23335			if x != v_1.Args[0] {
23336				continue
23337			}
23338			v_1_1 := v_1.Args[1]
23339			if v_1_1.Op != OpConst64 {
23340				continue
23341			}
23342			d := auxIntToInt64(v_1_1.AuxInt)
23343			if !(uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)) {
23344				continue
23345			}
23346			v.reset(OpLeq64U)
23347			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
23348			v0.AuxInt = int64ToAuxInt(c - d - 1)
23349			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
23350			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
23351			v2.AuxInt = int64ToAuxInt(d + 1)
23352			v1.AddArg2(x, v2)
23353			v.AddArg2(v0, v1)
23354			return true
23355		}
23356		break
23357	}
23358	// match: (OrB (Less32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
23359	// cond: uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)
23360	// result: (Less32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
23361	for {
23362		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
23363			if v_0.Op != OpLess32U {
23364				continue
23365			}
23366			x := v_0.Args[1]
23367			v_0_0 := v_0.Args[0]
23368			if v_0_0.Op != OpConst32 {
23369				continue
23370			}
23371			c := auxIntToInt32(v_0_0.AuxInt)
23372			if v_1.Op != OpLeq32U {
23373				continue
23374			}
23375			_ = v_1.Args[1]
23376			if x != v_1.Args[0] {
23377				continue
23378			}
23379			v_1_1 := v_1.Args[1]
23380			if v_1_1.Op != OpConst32 {
23381				continue
23382			}
23383			d := auxIntToInt32(v_1_1.AuxInt)
23384			if !(uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)) {
23385				continue
23386			}
23387			v.reset(OpLess32U)
23388			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
23389			v0.AuxInt = int32ToAuxInt(c - d - 1)
23390			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
23391			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
23392			v2.AuxInt = int32ToAuxInt(d + 1)
23393			v1.AddArg2(x, v2)
23394			v.AddArg2(v0, v1)
23395			return true
23396		}
23397		break
23398	}
23399	// match: (OrB (Leq32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
23400	// cond: uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)
23401	// result: (Leq32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
23402	for {
23403		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
23404			if v_0.Op != OpLeq32U {
23405				continue
23406			}
23407			x := v_0.Args[1]
23408			v_0_0 := v_0.Args[0]
23409			if v_0_0.Op != OpConst32 {
23410				continue
23411			}
23412			c := auxIntToInt32(v_0_0.AuxInt)
23413			if v_1.Op != OpLeq32U {
23414				continue
23415			}
23416			_ = v_1.Args[1]
23417			if x != v_1.Args[0] {
23418				continue
23419			}
23420			v_1_1 := v_1.Args[1]
23421			if v_1_1.Op != OpConst32 {
23422				continue
23423			}
23424			d := auxIntToInt32(v_1_1.AuxInt)
23425			if !(uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)) {
23426				continue
23427			}
23428			v.reset(OpLeq32U)
23429			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
23430			v0.AuxInt = int32ToAuxInt(c - d - 1)
23431			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
23432			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
23433			v2.AuxInt = int32ToAuxInt(d + 1)
23434			v1.AddArg2(x, v2)
23435			v.AddArg2(v0, v1)
23436			return true
23437		}
23438		break
23439	}
23440	// match: (OrB (Less16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
23441	// cond: uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)
23442	// result: (Less16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
23443	for {
23444		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
23445			if v_0.Op != OpLess16U {
23446				continue
23447			}
23448			x := v_0.Args[1]
23449			v_0_0 := v_0.Args[0]
23450			if v_0_0.Op != OpConst16 {
23451				continue
23452			}
23453			c := auxIntToInt16(v_0_0.AuxInt)
23454			if v_1.Op != OpLeq16U {
23455				continue
23456			}
23457			_ = v_1.Args[1]
23458			if x != v_1.Args[0] {
23459				continue
23460			}
23461			v_1_1 := v_1.Args[1]
23462			if v_1_1.Op != OpConst16 {
23463				continue
23464			}
23465			d := auxIntToInt16(v_1_1.AuxInt)
23466			if !(uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)) {
23467				continue
23468			}
23469			v.reset(OpLess16U)
23470			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
23471			v0.AuxInt = int16ToAuxInt(c - d - 1)
23472			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
23473			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
23474			v2.AuxInt = int16ToAuxInt(d + 1)
23475			v1.AddArg2(x, v2)
23476			v.AddArg2(v0, v1)
23477			return true
23478		}
23479		break
23480	}
23481	// match: (OrB (Leq16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
23482	// cond: uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)
23483	// result: (Leq16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
23484	for {
23485		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
23486			if v_0.Op != OpLeq16U {
23487				continue
23488			}
23489			x := v_0.Args[1]
23490			v_0_0 := v_0.Args[0]
23491			if v_0_0.Op != OpConst16 {
23492				continue
23493			}
23494			c := auxIntToInt16(v_0_0.AuxInt)
23495			if v_1.Op != OpLeq16U {
23496				continue
23497			}
23498			_ = v_1.Args[1]
23499			if x != v_1.Args[0] {
23500				continue
23501			}
23502			v_1_1 := v_1.Args[1]
23503			if v_1_1.Op != OpConst16 {
23504				continue
23505			}
23506			d := auxIntToInt16(v_1_1.AuxInt)
23507			if !(uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)) {
23508				continue
23509			}
23510			v.reset(OpLeq16U)
23511			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
23512			v0.AuxInt = int16ToAuxInt(c - d - 1)
23513			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
23514			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
23515			v2.AuxInt = int16ToAuxInt(d + 1)
23516			v1.AddArg2(x, v2)
23517			v.AddArg2(v0, v1)
23518			return true
23519		}
23520		break
23521	}
23522	// match: (OrB (Less8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
23523	// cond: uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)
23524	// result: (Less8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
23525	for {
23526		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
23527			if v_0.Op != OpLess8U {
23528				continue
23529			}
23530			x := v_0.Args[1]
23531			v_0_0 := v_0.Args[0]
23532			if v_0_0.Op != OpConst8 {
23533				continue
23534			}
23535			c := auxIntToInt8(v_0_0.AuxInt)
23536			if v_1.Op != OpLeq8U {
23537				continue
23538			}
23539			_ = v_1.Args[1]
23540			if x != v_1.Args[0] {
23541				continue
23542			}
23543			v_1_1 := v_1.Args[1]
23544			if v_1_1.Op != OpConst8 {
23545				continue
23546			}
23547			d := auxIntToInt8(v_1_1.AuxInt)
23548			if !(uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)) {
23549				continue
23550			}
23551			v.reset(OpLess8U)
23552			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
23553			v0.AuxInt = int8ToAuxInt(c - d - 1)
23554			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
23555			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
23556			v2.AuxInt = int8ToAuxInt(d + 1)
23557			v1.AddArg2(x, v2)
23558			v.AddArg2(v0, v1)
23559			return true
23560		}
23561		break
23562	}
23563	// match: (OrB (Leq8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
23564	// cond: uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)
23565	// result: (Leq8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
23566	for {
23567		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
23568			if v_0.Op != OpLeq8U {
23569				continue
23570			}
23571			x := v_0.Args[1]
23572			v_0_0 := v_0.Args[0]
23573			if v_0_0.Op != OpConst8 {
23574				continue
23575			}
23576			c := auxIntToInt8(v_0_0.AuxInt)
23577			if v_1.Op != OpLeq8U {
23578				continue
23579			}
23580			_ = v_1.Args[1]
23581			if x != v_1.Args[0] {
23582				continue
23583			}
23584			v_1_1 := v_1.Args[1]
23585			if v_1_1.Op != OpConst8 {
23586				continue
23587			}
23588			d := auxIntToInt8(v_1_1.AuxInt)
23589			if !(uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)) {
23590				continue
23591			}
23592			v.reset(OpLeq8U)
23593			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
23594			v0.AuxInt = int8ToAuxInt(c - d - 1)
23595			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
23596			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
23597			v2.AuxInt = int8ToAuxInt(d + 1)
23598			v1.AddArg2(x, v2)
23599			v.AddArg2(v0, v1)
23600			return true
23601		}
23602		break
23603	}
23604	return false
23605}
// rewriteValuegeneric_OpPhi applies the generic rewrite rules for Phi values
// and reports whether v was rewritten. It folds a two-argument Phi of two
// identical integer constants into that constant, and hoists a shared Not
// out of a two-argument Phi of single-use Not values.
// NOTE: generated from _gen/generic.rules — change the rules, not this code.
func rewriteValuegeneric_OpPhi(v *Value) bool {
	b := v.Block
	// match: (Phi (Const8 [c]) (Const8 [c]))
	// result: (Const8 [c])
	for {
		// Phi is variadic; these rules only apply to exactly two inputs.
		if len(v.Args) != 2 {
			break
		}
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		v_1 := v.Args[1]
		// Both incoming values must be the same constant for the fold.
		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != c {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(c)
		return true
	}
	// match: (Phi (Const16 [c]) (Const16 [c]))
	// result: (Const16 [c])
	for {
		if len(v.Args) != 2 {
			break
		}
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != c {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(c)
		return true
	}
	// match: (Phi (Const32 [c]) (Const32 [c]))
	// result: (Const32 [c])
	for {
		if len(v.Args) != 2 {
			break
		}
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != c {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(c)
		return true
	}
	// match: (Phi (Const64 [c]) (Const64 [c]))
	// result: (Const64 [c])
	for {
		if len(v.Args) != 2 {
			break
		}
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	// match: (Phi <t> nx:(Not x) ny:(Not y))
	// cond: nx.Uses == 1 && ny.Uses == 1
	// result: (Not (Phi <t> x y))
	for {
		if len(v.Args) != 2 {
			break
		}
		t := v.Type
		_ = v.Args[1]
		nx := v.Args[0]
		if nx.Op != OpNot {
			break
		}
		x := nx.Args[0]
		ny := v.Args[1]
		if ny.Op != OpNot {
			break
		}
		y := ny.Args[0]
		// The Uses==1 condition ensures the original Not values become dead
		// after the rewrite, so this never increases the number of Not ops.
		if !(nx.Uses == 1 && ny.Uses == 1) {
			break
		}
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpPhi, t)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpPtrIndex lowers PtrIndex (indexing off a typed
// pointer) into explicit pointer arithmetic: AddPtr of the base pointer and
// idx multiplied by the element size. The multiply width follows the
// target's pointer size (Mul32 for 4-byte pointers, Mul64 for 8-byte).
// Reports whether v was rewritten.
// NOTE: generated from _gen/generic.rules — change the rules, not this code.
func rewriteValuegeneric_OpPtrIndex(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (PtrIndex <t> ptr idx)
	// cond: config.PtrSize == 4 && is32Bit(t.Elem().Size())
	// result: (AddPtr ptr (Mul32 <typ.Int> idx (Const32 <typ.Int> [int32(t.Elem().Size())])))
	for {
		t := v.Type
		ptr := v_0
		idx := v_1
		// 32-bit targets: the element size must also fit in 32 bits for
		// the scaled offset to be representable as a Const32.
		if !(config.PtrSize == 4 && is32Bit(t.Elem().Size())) {
			break
		}
		v.reset(OpAddPtr)
		v0 := b.NewValue0(v.Pos, OpMul32, typ.Int)
		v1 := b.NewValue0(v.Pos, OpConst32, typ.Int)
		v1.AuxInt = int32ToAuxInt(int32(t.Elem().Size()))
		v0.AddArg2(idx, v1)
		v.AddArg2(ptr, v0)
		return true
	}
	// match: (PtrIndex <t> ptr idx)
	// cond: config.PtrSize == 8
	// result: (AddPtr ptr (Mul64 <typ.Int> idx (Const64 <typ.Int> [t.Elem().Size()])))
	for {
		t := v.Type
		ptr := v_0
		idx := v_1
		if !(config.PtrSize == 8) {
			break
		}
		v.reset(OpAddPtr)
		v0 := b.NewValue0(v.Pos, OpMul64, typ.Int)
		v1 := b.NewValue0(v.Pos, OpConst64, typ.Int)
		v1.AuxInt = int64ToAuxInt(t.Elem().Size())
		v0.AddArg2(idx, v1)
		v.AddArg2(ptr, v0)
		return true
	}
	return false
}
23762func rewriteValuegeneric_OpRotateLeft16(v *Value) bool {
23763	v_1 := v.Args[1]
23764	v_0 := v.Args[0]
23765	b := v.Block
23766	config := b.Func.Config
23767	// match: (RotateLeft16 x (Const16 [c]))
23768	// cond: c%16 == 0
23769	// result: x
23770	for {
23771		x := v_0
23772		if v_1.Op != OpConst16 {
23773			break
23774		}
23775		c := auxIntToInt16(v_1.AuxInt)
23776		if !(c%16 == 0) {
23777			break
23778		}
23779		v.copyOf(x)
23780		return true
23781	}
23782	// match: (RotateLeft16 x (And64 y (Const64 [c])))
23783	// cond: c&15 == 15
23784	// result: (RotateLeft16 x y)
23785	for {
23786		x := v_0
23787		if v_1.Op != OpAnd64 {
23788			break
23789		}
23790		_ = v_1.Args[1]
23791		v_1_0 := v_1.Args[0]
23792		v_1_1 := v_1.Args[1]
23793		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23794			y := v_1_0
23795			if v_1_1.Op != OpConst64 {
23796				continue
23797			}
23798			c := auxIntToInt64(v_1_1.AuxInt)
23799			if !(c&15 == 15) {
23800				continue
23801			}
23802			v.reset(OpRotateLeft16)
23803			v.AddArg2(x, y)
23804			return true
23805		}
23806		break
23807	}
23808	// match: (RotateLeft16 x (And32 y (Const32 [c])))
23809	// cond: c&15 == 15
23810	// result: (RotateLeft16 x y)
23811	for {
23812		x := v_0
23813		if v_1.Op != OpAnd32 {
23814			break
23815		}
23816		_ = v_1.Args[1]
23817		v_1_0 := v_1.Args[0]
23818		v_1_1 := v_1.Args[1]
23819		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23820			y := v_1_0
23821			if v_1_1.Op != OpConst32 {
23822				continue
23823			}
23824			c := auxIntToInt32(v_1_1.AuxInt)
23825			if !(c&15 == 15) {
23826				continue
23827			}
23828			v.reset(OpRotateLeft16)
23829			v.AddArg2(x, y)
23830			return true
23831		}
23832		break
23833	}
23834	// match: (RotateLeft16 x (And16 y (Const16 [c])))
23835	// cond: c&15 == 15
23836	// result: (RotateLeft16 x y)
23837	for {
23838		x := v_0
23839		if v_1.Op != OpAnd16 {
23840			break
23841		}
23842		_ = v_1.Args[1]
23843		v_1_0 := v_1.Args[0]
23844		v_1_1 := v_1.Args[1]
23845		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23846			y := v_1_0
23847			if v_1_1.Op != OpConst16 {
23848				continue
23849			}
23850			c := auxIntToInt16(v_1_1.AuxInt)
23851			if !(c&15 == 15) {
23852				continue
23853			}
23854			v.reset(OpRotateLeft16)
23855			v.AddArg2(x, y)
23856			return true
23857		}
23858		break
23859	}
23860	// match: (RotateLeft16 x (And8 y (Const8 [c])))
23861	// cond: c&15 == 15
23862	// result: (RotateLeft16 x y)
23863	for {
23864		x := v_0
23865		if v_1.Op != OpAnd8 {
23866			break
23867		}
23868		_ = v_1.Args[1]
23869		v_1_0 := v_1.Args[0]
23870		v_1_1 := v_1.Args[1]
23871		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23872			y := v_1_0
23873			if v_1_1.Op != OpConst8 {
23874				continue
23875			}
23876			c := auxIntToInt8(v_1_1.AuxInt)
23877			if !(c&15 == 15) {
23878				continue
23879			}
23880			v.reset(OpRotateLeft16)
23881			v.AddArg2(x, y)
23882			return true
23883		}
23884		break
23885	}
23886	// match: (RotateLeft16 x (Neg64 (And64 y (Const64 [c]))))
23887	// cond: c&15 == 15
23888	// result: (RotateLeft16 x (Neg64 <y.Type> y))
23889	for {
23890		x := v_0
23891		if v_1.Op != OpNeg64 {
23892			break
23893		}
23894		v_1_0 := v_1.Args[0]
23895		if v_1_0.Op != OpAnd64 {
23896			break
23897		}
23898		_ = v_1_0.Args[1]
23899		v_1_0_0 := v_1_0.Args[0]
23900		v_1_0_1 := v_1_0.Args[1]
23901		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
23902			y := v_1_0_0
23903			if v_1_0_1.Op != OpConst64 {
23904				continue
23905			}
23906			c := auxIntToInt64(v_1_0_1.AuxInt)
23907			if !(c&15 == 15) {
23908				continue
23909			}
23910			v.reset(OpRotateLeft16)
23911			v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
23912			v0.AddArg(y)
23913			v.AddArg2(x, v0)
23914			return true
23915		}
23916		break
23917	}
23918	// match: (RotateLeft16 x (Neg32 (And32 y (Const32 [c]))))
23919	// cond: c&15 == 15
23920	// result: (RotateLeft16 x (Neg32 <y.Type> y))
23921	for {
23922		x := v_0
23923		if v_1.Op != OpNeg32 {
23924			break
23925		}
23926		v_1_0 := v_1.Args[0]
23927		if v_1_0.Op != OpAnd32 {
23928			break
23929		}
23930		_ = v_1_0.Args[1]
23931		v_1_0_0 := v_1_0.Args[0]
23932		v_1_0_1 := v_1_0.Args[1]
23933		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
23934			y := v_1_0_0
23935			if v_1_0_1.Op != OpConst32 {
23936				continue
23937			}
23938			c := auxIntToInt32(v_1_0_1.AuxInt)
23939			if !(c&15 == 15) {
23940				continue
23941			}
23942			v.reset(OpRotateLeft16)
23943			v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
23944			v0.AddArg(y)
23945			v.AddArg2(x, v0)
23946			return true
23947		}
23948		break
23949	}
23950	// match: (RotateLeft16 x (Neg16 (And16 y (Const16 [c]))))
23951	// cond: c&15 == 15
23952	// result: (RotateLeft16 x (Neg16 <y.Type> y))
23953	for {
23954		x := v_0
23955		if v_1.Op != OpNeg16 {
23956			break
23957		}
23958		v_1_0 := v_1.Args[0]
23959		if v_1_0.Op != OpAnd16 {
23960			break
23961		}
23962		_ = v_1_0.Args[1]
23963		v_1_0_0 := v_1_0.Args[0]
23964		v_1_0_1 := v_1_0.Args[1]
23965		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
23966			y := v_1_0_0
23967			if v_1_0_1.Op != OpConst16 {
23968				continue
23969			}
23970			c := auxIntToInt16(v_1_0_1.AuxInt)
23971			if !(c&15 == 15) {
23972				continue
23973			}
23974			v.reset(OpRotateLeft16)
23975			v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
23976			v0.AddArg(y)
23977			v.AddArg2(x, v0)
23978			return true
23979		}
23980		break
23981	}
23982	// match: (RotateLeft16 x (Neg8 (And8 y (Const8 [c]))))
23983	// cond: c&15 == 15
23984	// result: (RotateLeft16 x (Neg8 <y.Type> y))
23985	for {
23986		x := v_0
23987		if v_1.Op != OpNeg8 {
23988			break
23989		}
23990		v_1_0 := v_1.Args[0]
23991		if v_1_0.Op != OpAnd8 {
23992			break
23993		}
23994		_ = v_1_0.Args[1]
23995		v_1_0_0 := v_1_0.Args[0]
23996		v_1_0_1 := v_1_0.Args[1]
23997		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
23998			y := v_1_0_0
23999			if v_1_0_1.Op != OpConst8 {
24000				continue
24001			}
24002			c := auxIntToInt8(v_1_0_1.AuxInt)
24003			if !(c&15 == 15) {
24004				continue
24005			}
24006			v.reset(OpRotateLeft16)
24007			v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
24008			v0.AddArg(y)
24009			v.AddArg2(x, v0)
24010			return true
24011		}
24012		break
24013	}
24014	// match: (RotateLeft16 x (Add64 y (Const64 [c])))
24015	// cond: c&15 == 0
24016	// result: (RotateLeft16 x y)
24017	for {
24018		x := v_0
24019		if v_1.Op != OpAdd64 {
24020			break
24021		}
24022		_ = v_1.Args[1]
24023		v_1_0 := v_1.Args[0]
24024		v_1_1 := v_1.Args[1]
24025		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24026			y := v_1_0
24027			if v_1_1.Op != OpConst64 {
24028				continue
24029			}
24030			c := auxIntToInt64(v_1_1.AuxInt)
24031			if !(c&15 == 0) {
24032				continue
24033			}
24034			v.reset(OpRotateLeft16)
24035			v.AddArg2(x, y)
24036			return true
24037		}
24038		break
24039	}
24040	// match: (RotateLeft16 x (Add32 y (Const32 [c])))
24041	// cond: c&15 == 0
24042	// result: (RotateLeft16 x y)
24043	for {
24044		x := v_0
24045		if v_1.Op != OpAdd32 {
24046			break
24047		}
24048		_ = v_1.Args[1]
24049		v_1_0 := v_1.Args[0]
24050		v_1_1 := v_1.Args[1]
24051		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24052			y := v_1_0
24053			if v_1_1.Op != OpConst32 {
24054				continue
24055			}
24056			c := auxIntToInt32(v_1_1.AuxInt)
24057			if !(c&15 == 0) {
24058				continue
24059			}
24060			v.reset(OpRotateLeft16)
24061			v.AddArg2(x, y)
24062			return true
24063		}
24064		break
24065	}
24066	// match: (RotateLeft16 x (Add16 y (Const16 [c])))
24067	// cond: c&15 == 0
24068	// result: (RotateLeft16 x y)
24069	for {
24070		x := v_0
24071		if v_1.Op != OpAdd16 {
24072			break
24073		}
24074		_ = v_1.Args[1]
24075		v_1_0 := v_1.Args[0]
24076		v_1_1 := v_1.Args[1]
24077		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24078			y := v_1_0
24079			if v_1_1.Op != OpConst16 {
24080				continue
24081			}
24082			c := auxIntToInt16(v_1_1.AuxInt)
24083			if !(c&15 == 0) {
24084				continue
24085			}
24086			v.reset(OpRotateLeft16)
24087			v.AddArg2(x, y)
24088			return true
24089		}
24090		break
24091	}
24092	// match: (RotateLeft16 x (Add8 y (Const8 [c])))
24093	// cond: c&15 == 0
24094	// result: (RotateLeft16 x y)
24095	for {
24096		x := v_0
24097		if v_1.Op != OpAdd8 {
24098			break
24099		}
24100		_ = v_1.Args[1]
24101		v_1_0 := v_1.Args[0]
24102		v_1_1 := v_1.Args[1]
24103		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24104			y := v_1_0
24105			if v_1_1.Op != OpConst8 {
24106				continue
24107			}
24108			c := auxIntToInt8(v_1_1.AuxInt)
24109			if !(c&15 == 0) {
24110				continue
24111			}
24112			v.reset(OpRotateLeft16)
24113			v.AddArg2(x, y)
24114			return true
24115		}
24116		break
24117	}
24118	// match: (RotateLeft16 x (Sub64 (Const64 [c]) y))
24119	// cond: c&15 == 0
24120	// result: (RotateLeft16 x (Neg64 <y.Type> y))
24121	for {
24122		x := v_0
24123		if v_1.Op != OpSub64 {
24124			break
24125		}
24126		y := v_1.Args[1]
24127		v_1_0 := v_1.Args[0]
24128		if v_1_0.Op != OpConst64 {
24129			break
24130		}
24131		c := auxIntToInt64(v_1_0.AuxInt)
24132		if !(c&15 == 0) {
24133			break
24134		}
24135		v.reset(OpRotateLeft16)
24136		v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
24137		v0.AddArg(y)
24138		v.AddArg2(x, v0)
24139		return true
24140	}
24141	// match: (RotateLeft16 x (Sub32 (Const32 [c]) y))
24142	// cond: c&15 == 0
24143	// result: (RotateLeft16 x (Neg32 <y.Type> y))
24144	for {
24145		x := v_0
24146		if v_1.Op != OpSub32 {
24147			break
24148		}
24149		y := v_1.Args[1]
24150		v_1_0 := v_1.Args[0]
24151		if v_1_0.Op != OpConst32 {
24152			break
24153		}
24154		c := auxIntToInt32(v_1_0.AuxInt)
24155		if !(c&15 == 0) {
24156			break
24157		}
24158		v.reset(OpRotateLeft16)
24159		v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
24160		v0.AddArg(y)
24161		v.AddArg2(x, v0)
24162		return true
24163	}
24164	// match: (RotateLeft16 x (Sub16 (Const16 [c]) y))
24165	// cond: c&15 == 0
24166	// result: (RotateLeft16 x (Neg16 <y.Type> y))
24167	for {
24168		x := v_0
24169		if v_1.Op != OpSub16 {
24170			break
24171		}
24172		y := v_1.Args[1]
24173		v_1_0 := v_1.Args[0]
24174		if v_1_0.Op != OpConst16 {
24175			break
24176		}
24177		c := auxIntToInt16(v_1_0.AuxInt)
24178		if !(c&15 == 0) {
24179			break
24180		}
24181		v.reset(OpRotateLeft16)
24182		v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
24183		v0.AddArg(y)
24184		v.AddArg2(x, v0)
24185		return true
24186	}
24187	// match: (RotateLeft16 x (Sub8 (Const8 [c]) y))
24188	// cond: c&15 == 0
24189	// result: (RotateLeft16 x (Neg8 <y.Type> y))
24190	for {
24191		x := v_0
24192		if v_1.Op != OpSub8 {
24193			break
24194		}
24195		y := v_1.Args[1]
24196		v_1_0 := v_1.Args[0]
24197		if v_1_0.Op != OpConst8 {
24198			break
24199		}
24200		c := auxIntToInt8(v_1_0.AuxInt)
24201		if !(c&15 == 0) {
24202			break
24203		}
24204		v.reset(OpRotateLeft16)
24205		v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
24206		v0.AddArg(y)
24207		v.AddArg2(x, v0)
24208		return true
24209	}
24210	// match: (RotateLeft16 x (Const64 <t> [c]))
24211	// cond: config.PtrSize == 4
24212	// result: (RotateLeft16 x (Const32 <t> [int32(c)]))
24213	for {
24214		x := v_0
24215		if v_1.Op != OpConst64 {
24216			break
24217		}
24218		t := v_1.Type
24219		c := auxIntToInt64(v_1.AuxInt)
24220		if !(config.PtrSize == 4) {
24221			break
24222		}
24223		v.reset(OpRotateLeft16)
24224		v0 := b.NewValue0(v.Pos, OpConst32, t)
24225		v0.AuxInt = int32ToAuxInt(int32(c))
24226		v.AddArg2(x, v0)
24227		return true
24228	}
24229	// match: (RotateLeft16 (RotateLeft16 x c) d)
24230	// cond: c.Type.Size() == 8 && d.Type.Size() == 8
24231	// result: (RotateLeft16 x (Add64 <c.Type> c d))
24232	for {
24233		if v_0.Op != OpRotateLeft16 {
24234			break
24235		}
24236		c := v_0.Args[1]
24237		x := v_0.Args[0]
24238		d := v_1
24239		if !(c.Type.Size() == 8 && d.Type.Size() == 8) {
24240			break
24241		}
24242		v.reset(OpRotateLeft16)
24243		v0 := b.NewValue0(v.Pos, OpAdd64, c.Type)
24244		v0.AddArg2(c, d)
24245		v.AddArg2(x, v0)
24246		return true
24247	}
24248	// match: (RotateLeft16 (RotateLeft16 x c) d)
24249	// cond: c.Type.Size() == 4 && d.Type.Size() == 4
24250	// result: (RotateLeft16 x (Add32 <c.Type> c d))
24251	for {
24252		if v_0.Op != OpRotateLeft16 {
24253			break
24254		}
24255		c := v_0.Args[1]
24256		x := v_0.Args[0]
24257		d := v_1
24258		if !(c.Type.Size() == 4 && d.Type.Size() == 4) {
24259			break
24260		}
24261		v.reset(OpRotateLeft16)
24262		v0 := b.NewValue0(v.Pos, OpAdd32, c.Type)
24263		v0.AddArg2(c, d)
24264		v.AddArg2(x, v0)
24265		return true
24266	}
24267	// match: (RotateLeft16 (RotateLeft16 x c) d)
24268	// cond: c.Type.Size() == 2 && d.Type.Size() == 2
24269	// result: (RotateLeft16 x (Add16 <c.Type> c d))
24270	for {
24271		if v_0.Op != OpRotateLeft16 {
24272			break
24273		}
24274		c := v_0.Args[1]
24275		x := v_0.Args[0]
24276		d := v_1
24277		if !(c.Type.Size() == 2 && d.Type.Size() == 2) {
24278			break
24279		}
24280		v.reset(OpRotateLeft16)
24281		v0 := b.NewValue0(v.Pos, OpAdd16, c.Type)
24282		v0.AddArg2(c, d)
24283		v.AddArg2(x, v0)
24284		return true
24285	}
24286	// match: (RotateLeft16 (RotateLeft16 x c) d)
24287	// cond: c.Type.Size() == 1 && d.Type.Size() == 1
24288	// result: (RotateLeft16 x (Add8 <c.Type> c d))
24289	for {
24290		if v_0.Op != OpRotateLeft16 {
24291			break
24292		}
24293		c := v_0.Args[1]
24294		x := v_0.Args[0]
24295		d := v_1
24296		if !(c.Type.Size() == 1 && d.Type.Size() == 1) {
24297			break
24298		}
24299		v.reset(OpRotateLeft16)
24300		v0 := b.NewValue0(v.Pos, OpAdd8, c.Type)
24301		v0.AddArg2(c, d)
24302		v.AddArg2(x, v0)
24303		return true
24304	}
24305	return false
24306}
// rewriteValuegeneric_OpRotateLeft32 applies the generic rewrite rules for
// RotateLeft32 and reports whether v was rewritten.
//
// NOTE(review): this file is generated from _gen/generic.rules — change the
// rules there and regenerate rather than editing this function by hand.
//
// The rules, in match order:
//   - (RotateLeft32 x (Const32 [c])) with c%32 == 0 is the identity; reduce to x.
//   - Strip an AndNN mask on the rotate count when the mask keeps all 5 low
//     bits (c&31 == 31), both in the plain form and wrapped in NegNN (the
//     negated-count form), since only the low 5 bits of the count matter.
//   - Drop an AddNN of a constant whose low 5 bits are zero (c&31 == 0) from
//     the count.
//   - Turn (SubNN (ConstNN [c]) y) with c&31 == 0 into a NegNN of y.
//   - On 32-bit targets (config.PtrSize == 4), narrow a Const64 count to
//     Const32.
//   - Merge nested rotations (RotateLeft32 (RotateLeft32 x c) d) into a single
//     rotation by an AddNN of the counts, picking the Add width that matches
//     c's and d's type size (8/4/2/1 bytes).
//
// The commutative AndNN/AddNN matches use the _i0 loop to try both argument
// orders.
func rewriteValuegeneric_OpRotateLeft32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (RotateLeft32 x (Const32 [c]))
	// cond: c%32 == 0
	// result: x
	for {
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		if !(c%32 == 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (RotateLeft32 x (And64 y (Const64 [c])))
	// cond: c&31 == 31
	// result: (RotateLeft32 x y)
	for {
		x := v_0
		if v_1.Op != OpAnd64 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c&31 == 31) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft32 x (And32 y (Const32 [c])))
	// cond: c&31 == 31
	// result: (RotateLeft32 x y)
	for {
		x := v_0
		if v_1.Op != OpAnd32 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_1_1.AuxInt)
			if !(c&31 == 31) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft32 x (And16 y (Const16 [c])))
	// cond: c&31 == 31
	// result: (RotateLeft32 x y)
	for {
		x := v_0
		if v_1.Op != OpAnd16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_1_1.AuxInt)
			if !(c&31 == 31) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft32 x (And8 y (Const8 [c])))
	// cond: c&31 == 31
	// result: (RotateLeft32 x y)
	for {
		x := v_0
		if v_1.Op != OpAnd8 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_1_1.AuxInt)
			if !(c&31 == 31) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft32 x (Neg64 (And64 y (Const64 [c]))))
	// cond: c&31 == 31
	// result: (RotateLeft32 x (Neg64 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpNeg64 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpAnd64 {
			break
		}
		_ = v_1_0.Args[1]
		v_1_0_0 := v_1_0.Args[0]
		v_1_0_1 := v_1_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
			y := v_1_0_0
			if v_1_0_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_0_1.AuxInt)
			if !(c&31 == 31) {
				continue
			}
			v.reset(OpRotateLeft32)
			v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
			v0.AddArg(y)
			v.AddArg2(x, v0)
			return true
		}
		break
	}
	// match: (RotateLeft32 x (Neg32 (And32 y (Const32 [c]))))
	// cond: c&31 == 31
	// result: (RotateLeft32 x (Neg32 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpNeg32 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpAnd32 {
			break
		}
		_ = v_1_0.Args[1]
		v_1_0_0 := v_1_0.Args[0]
		v_1_0_1 := v_1_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
			y := v_1_0_0
			if v_1_0_1.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_1_0_1.AuxInt)
			if !(c&31 == 31) {
				continue
			}
			v.reset(OpRotateLeft32)
			v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
			v0.AddArg(y)
			v.AddArg2(x, v0)
			return true
		}
		break
	}
	// match: (RotateLeft32 x (Neg16 (And16 y (Const16 [c]))))
	// cond: c&31 == 31
	// result: (RotateLeft32 x (Neg16 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpNeg16 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpAnd16 {
			break
		}
		_ = v_1_0.Args[1]
		v_1_0_0 := v_1_0.Args[0]
		v_1_0_1 := v_1_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
			y := v_1_0_0
			if v_1_0_1.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_1_0_1.AuxInt)
			if !(c&31 == 31) {
				continue
			}
			v.reset(OpRotateLeft32)
			v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
			v0.AddArg(y)
			v.AddArg2(x, v0)
			return true
		}
		break
	}
	// match: (RotateLeft32 x (Neg8 (And8 y (Const8 [c]))))
	// cond: c&31 == 31
	// result: (RotateLeft32 x (Neg8 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpNeg8 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpAnd8 {
			break
		}
		_ = v_1_0.Args[1]
		v_1_0_0 := v_1_0.Args[0]
		v_1_0_1 := v_1_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
			y := v_1_0_0
			if v_1_0_1.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_1_0_1.AuxInt)
			if !(c&31 == 31) {
				continue
			}
			v.reset(OpRotateLeft32)
			v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
			v0.AddArg(y)
			v.AddArg2(x, v0)
			return true
		}
		break
	}
	// match: (RotateLeft32 x (Add64 y (Const64 [c])))
	// cond: c&31 == 0
	// result: (RotateLeft32 x y)
	for {
		x := v_0
		if v_1.Op != OpAdd64 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c&31 == 0) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft32 x (Add32 y (Const32 [c])))
	// cond: c&31 == 0
	// result: (RotateLeft32 x y)
	for {
		x := v_0
		if v_1.Op != OpAdd32 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_1_1.AuxInt)
			if !(c&31 == 0) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft32 x (Add16 y (Const16 [c])))
	// cond: c&31 == 0
	// result: (RotateLeft32 x y)
	for {
		x := v_0
		if v_1.Op != OpAdd16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_1_1.AuxInt)
			if !(c&31 == 0) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft32 x (Add8 y (Const8 [c])))
	// cond: c&31 == 0
	// result: (RotateLeft32 x y)
	for {
		x := v_0
		if v_1.Op != OpAdd8 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_1_1.AuxInt)
			if !(c&31 == 0) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft32 x (Sub64 (Const64 [c]) y))
	// cond: c&31 == 0
	// result: (RotateLeft32 x (Neg64 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpSub64 {
			break
		}
		y := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1_0.AuxInt)
		if !(c&31 == 0) {
			break
		}
		v.reset(OpRotateLeft32)
		v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft32 x (Sub32 (Const32 [c]) y))
	// cond: c&31 == 0
	// result: (RotateLeft32 x (Neg32 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpSub32 {
			break
		}
		y := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1_0.AuxInt)
		if !(c&31 == 0) {
			break
		}
		v.reset(OpRotateLeft32)
		v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft32 x (Sub16 (Const16 [c]) y))
	// cond: c&31 == 0
	// result: (RotateLeft32 x (Neg16 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpSub16 {
			break
		}
		y := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1_0.AuxInt)
		if !(c&31 == 0) {
			break
		}
		v.reset(OpRotateLeft32)
		v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft32 x (Sub8 (Const8 [c]) y))
	// cond: c&31 == 0
	// result: (RotateLeft32 x (Neg8 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpSub8 {
			break
		}
		y := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1_0.AuxInt)
		if !(c&31 == 0) {
			break
		}
		v.reset(OpRotateLeft32)
		v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft32 x (Const64 <t> [c]))
	// cond: config.PtrSize == 4
	// result: (RotateLeft32 x (Const32 <t> [int32(c)]))
	for {
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := auxIntToInt64(v_1.AuxInt)
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpRotateLeft32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft32 (RotateLeft32 x c) d)
	// cond: c.Type.Size() == 8 && d.Type.Size() == 8
	// result: (RotateLeft32 x (Add64 <c.Type> c d))
	for {
		if v_0.Op != OpRotateLeft32 {
			break
		}
		c := v_0.Args[1]
		x := v_0.Args[0]
		d := v_1
		if !(c.Type.Size() == 8 && d.Type.Size() == 8) {
			break
		}
		v.reset(OpRotateLeft32)
		v0 := b.NewValue0(v.Pos, OpAdd64, c.Type)
		v0.AddArg2(c, d)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft32 (RotateLeft32 x c) d)
	// cond: c.Type.Size() == 4 && d.Type.Size() == 4
	// result: (RotateLeft32 x (Add32 <c.Type> c d))
	for {
		if v_0.Op != OpRotateLeft32 {
			break
		}
		c := v_0.Args[1]
		x := v_0.Args[0]
		d := v_1
		if !(c.Type.Size() == 4 && d.Type.Size() == 4) {
			break
		}
		v.reset(OpRotateLeft32)
		v0 := b.NewValue0(v.Pos, OpAdd32, c.Type)
		v0.AddArg2(c, d)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft32 (RotateLeft32 x c) d)
	// cond: c.Type.Size() == 2 && d.Type.Size() == 2
	// result: (RotateLeft32 x (Add16 <c.Type> c d))
	for {
		if v_0.Op != OpRotateLeft32 {
			break
		}
		c := v_0.Args[1]
		x := v_0.Args[0]
		d := v_1
		if !(c.Type.Size() == 2 && d.Type.Size() == 2) {
			break
		}
		v.reset(OpRotateLeft32)
		v0 := b.NewValue0(v.Pos, OpAdd16, c.Type)
		v0.AddArg2(c, d)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft32 (RotateLeft32 x c) d)
	// cond: c.Type.Size() == 1 && d.Type.Size() == 1
	// result: (RotateLeft32 x (Add8 <c.Type> c d))
	for {
		if v_0.Op != OpRotateLeft32 {
			break
		}
		c := v_0.Args[1]
		x := v_0.Args[0]
		d := v_1
		if !(c.Type.Size() == 1 && d.Type.Size() == 1) {
			break
		}
		v.reset(OpRotateLeft32)
		v0 := b.NewValue0(v.Pos, OpAdd8, c.Type)
		v0.AddArg2(c, d)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRotateLeft64 applies the generic rewrite rules for
// RotateLeft64 and reports whether v was rewritten.
//
// NOTE(review): this file is generated from _gen/generic.rules — change the
// rules there and regenerate rather than editing this function by hand.
//
// The rules, in match order (mirroring the RotateLeft32 version with a 6-bit
// count, mask 63):
//   - (RotateLeft64 x (Const64 [c])) with c%64 == 0 is the identity; reduce to x.
//   - Strip an AndNN mask on the rotate count when the mask keeps all 6 low
//     bits (c&63 == 63), both in the plain form and wrapped in NegNN (the
//     negated-count form), since only the low 6 bits of the count matter.
//   - Drop an AddNN of a constant whose low 6 bits are zero (c&63 == 0) from
//     the count.
//   - Turn (SubNN (ConstNN [c]) y) with c&63 == 0 into a NegNN of y.
//   - On 32-bit targets (config.PtrSize == 4), narrow a Const64 count to
//     Const32.
//   - Merge nested rotations (RotateLeft64 (RotateLeft64 x c) d) into a single
//     rotation by an AddNN of the counts, picking the Add width that matches
//     c's and d's type size (8/4/2/1 bytes).
//
// The commutative AndNN/AddNN matches use the _i0 loop to try both argument
// orders.
func rewriteValuegeneric_OpRotateLeft64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (RotateLeft64 x (Const64 [c]))
	// cond: c%64 == 0
	// result: x
	for {
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(c%64 == 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (RotateLeft64 x (And64 y (Const64 [c])))
	// cond: c&63 == 63
	// result: (RotateLeft64 x y)
	for {
		x := v_0
		if v_1.Op != OpAnd64 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c&63 == 63) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft64 x (And32 y (Const32 [c])))
	// cond: c&63 == 63
	// result: (RotateLeft64 x y)
	for {
		x := v_0
		if v_1.Op != OpAnd32 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_1_1.AuxInt)
			if !(c&63 == 63) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft64 x (And16 y (Const16 [c])))
	// cond: c&63 == 63
	// result: (RotateLeft64 x y)
	for {
		x := v_0
		if v_1.Op != OpAnd16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_1_1.AuxInt)
			if !(c&63 == 63) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft64 x (And8 y (Const8 [c])))
	// cond: c&63 == 63
	// result: (RotateLeft64 x y)
	for {
		x := v_0
		if v_1.Op != OpAnd8 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_1_1.AuxInt)
			if !(c&63 == 63) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft64 x (Neg64 (And64 y (Const64 [c]))))
	// cond: c&63 == 63
	// result: (RotateLeft64 x (Neg64 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpNeg64 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpAnd64 {
			break
		}
		_ = v_1_0.Args[1]
		v_1_0_0 := v_1_0.Args[0]
		v_1_0_1 := v_1_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
			y := v_1_0_0
			if v_1_0_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_0_1.AuxInt)
			if !(c&63 == 63) {
				continue
			}
			v.reset(OpRotateLeft64)
			v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
			v0.AddArg(y)
			v.AddArg2(x, v0)
			return true
		}
		break
	}
	// match: (RotateLeft64 x (Neg32 (And32 y (Const32 [c]))))
	// cond: c&63 == 63
	// result: (RotateLeft64 x (Neg32 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpNeg32 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpAnd32 {
			break
		}
		_ = v_1_0.Args[1]
		v_1_0_0 := v_1_0.Args[0]
		v_1_0_1 := v_1_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
			y := v_1_0_0
			if v_1_0_1.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_1_0_1.AuxInt)
			if !(c&63 == 63) {
				continue
			}
			v.reset(OpRotateLeft64)
			v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
			v0.AddArg(y)
			v.AddArg2(x, v0)
			return true
		}
		break
	}
	// match: (RotateLeft64 x (Neg16 (And16 y (Const16 [c]))))
	// cond: c&63 == 63
	// result: (RotateLeft64 x (Neg16 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpNeg16 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpAnd16 {
			break
		}
		_ = v_1_0.Args[1]
		v_1_0_0 := v_1_0.Args[0]
		v_1_0_1 := v_1_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
			y := v_1_0_0
			if v_1_0_1.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_1_0_1.AuxInt)
			if !(c&63 == 63) {
				continue
			}
			v.reset(OpRotateLeft64)
			v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
			v0.AddArg(y)
			v.AddArg2(x, v0)
			return true
		}
		break
	}
	// match: (RotateLeft64 x (Neg8 (And8 y (Const8 [c]))))
	// cond: c&63 == 63
	// result: (RotateLeft64 x (Neg8 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpNeg8 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpAnd8 {
			break
		}
		_ = v_1_0.Args[1]
		v_1_0_0 := v_1_0.Args[0]
		v_1_0_1 := v_1_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
			y := v_1_0_0
			if v_1_0_1.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_1_0_1.AuxInt)
			if !(c&63 == 63) {
				continue
			}
			v.reset(OpRotateLeft64)
			v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
			v0.AddArg(y)
			v.AddArg2(x, v0)
			return true
		}
		break
	}
	// match: (RotateLeft64 x (Add64 y (Const64 [c])))
	// cond: c&63 == 0
	// result: (RotateLeft64 x y)
	for {
		x := v_0
		if v_1.Op != OpAdd64 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c&63 == 0) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft64 x (Add32 y (Const32 [c])))
	// cond: c&63 == 0
	// result: (RotateLeft64 x y)
	for {
		x := v_0
		if v_1.Op != OpAdd32 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_1_1.AuxInt)
			if !(c&63 == 0) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft64 x (Add16 y (Const16 [c])))
	// cond: c&63 == 0
	// result: (RotateLeft64 x y)
	for {
		x := v_0
		if v_1.Op != OpAdd16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_1_1.AuxInt)
			if !(c&63 == 0) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft64 x (Add8 y (Const8 [c])))
	// cond: c&63 == 0
	// result: (RotateLeft64 x y)
	for {
		x := v_0
		if v_1.Op != OpAdd8 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_1_1.AuxInt)
			if !(c&63 == 0) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft64 x (Sub64 (Const64 [c]) y))
	// cond: c&63 == 0
	// result: (RotateLeft64 x (Neg64 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpSub64 {
			break
		}
		y := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1_0.AuxInt)
		if !(c&63 == 0) {
			break
		}
		v.reset(OpRotateLeft64)
		v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft64 x (Sub32 (Const32 [c]) y))
	// cond: c&63 == 0
	// result: (RotateLeft64 x (Neg32 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpSub32 {
			break
		}
		y := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1_0.AuxInt)
		if !(c&63 == 0) {
			break
		}
		v.reset(OpRotateLeft64)
		v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft64 x (Sub16 (Const16 [c]) y))
	// cond: c&63 == 0
	// result: (RotateLeft64 x (Neg16 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpSub16 {
			break
		}
		y := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1_0.AuxInt)
		if !(c&63 == 0) {
			break
		}
		v.reset(OpRotateLeft64)
		v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft64 x (Sub8 (Const8 [c]) y))
	// cond: c&63 == 0
	// result: (RotateLeft64 x (Neg8 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpSub8 {
			break
		}
		y := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1_0.AuxInt)
		if !(c&63 == 0) {
			break
		}
		v.reset(OpRotateLeft64)
		v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft64 x (Const64 <t> [c]))
	// cond: config.PtrSize == 4
	// result: (RotateLeft64 x (Const32 <t> [int32(c)]))
	for {
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := auxIntToInt64(v_1.AuxInt)
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpRotateLeft64)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft64 (RotateLeft64 x c) d)
	// cond: c.Type.Size() == 8 && d.Type.Size() == 8
	// result: (RotateLeft64 x (Add64 <c.Type> c d))
	for {
		if v_0.Op != OpRotateLeft64 {
			break
		}
		c := v_0.Args[1]
		x := v_0.Args[0]
		d := v_1
		if !(c.Type.Size() == 8 && d.Type.Size() == 8) {
			break
		}
		v.reset(OpRotateLeft64)
		v0 := b.NewValue0(v.Pos, OpAdd64, c.Type)
		v0.AddArg2(c, d)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft64 (RotateLeft64 x c) d)
	// cond: c.Type.Size() == 4 && d.Type.Size() == 4
	// result: (RotateLeft64 x (Add32 <c.Type> c d))
	for {
		if v_0.Op != OpRotateLeft64 {
			break
		}
		c := v_0.Args[1]
		x := v_0.Args[0]
		d := v_1
		if !(c.Type.Size() == 4 && d.Type.Size() == 4) {
			break
		}
		v.reset(OpRotateLeft64)
		v0 := b.NewValue0(v.Pos, OpAdd32, c.Type)
		v0.AddArg2(c, d)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft64 (RotateLeft64 x c) d)
	// cond: c.Type.Size() == 2 && d.Type.Size() == 2
	// result: (RotateLeft64 x (Add16 <c.Type> c d))
	for {
		if v_0.Op != OpRotateLeft64 {
			break
		}
		c := v_0.Args[1]
		x := v_0.Args[0]
		d := v_1
		if !(c.Type.Size() == 2 && d.Type.Size() == 2) {
			break
		}
		v.reset(OpRotateLeft64)
		v0 := b.NewValue0(v.Pos, OpAdd16, c.Type)
		v0.AddArg2(c, d)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft64 (RotateLeft64 x c) d)
	// cond: c.Type.Size() == 1 && d.Type.Size() == 1
	// result: (RotateLeft64 x (Add8 <c.Type> c d))
	for {
		if v_0.Op != OpRotateLeft64 {
			break
		}
		c := v_0.Args[1]
		x := v_0.Args[0]
		d := v_1
		if !(c.Type.Size() == 1 && d.Type.Size() == 1) {
			break
		}
		v.reset(OpRotateLeft64)
		v0 := b.NewValue0(v.Pos, OpAdd8, c.Type)
		v0.AddArg2(c, d)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRotateLeft8 applies the generic rewrite rules for
// RotateLeft8 to v: dropping rotations by multiples of 8, stripping
// redundant &7 masks (and their negated forms) from the rotate count,
// narrowing a 64-bit constant count to 32 bits on 4-byte-pointer targets,
// and merging nested rotations into a single rotate by the summed count.
// It reports whether v was rewritten in place. Each rule below carries its
// generated match/cond/result comment from _gen/generic.rules.
func rewriteValuegeneric_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1] // rotate count
	v_0 := v.Args[0] // value being rotated
	b := v.Block
	config := b.Func.Config // needed for the PtrSize-dependent rule below
	// match: (RotateLeft8 x (Const8 [c]))
	// cond: c%8 == 0
	// result: x
	for {
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		if !(c%8 == 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (RotateLeft8 x (And64 y (Const64 [c])))
	// cond: c&7 == 7
	// result: (RotateLeft8 x y)
	for {
		x := v_0
		if v_1.Op != OpAnd64 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c&7 == 7) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft8 x (And32 y (Const32 [c])))
	// cond: c&7 == 7
	// result: (RotateLeft8 x y)
	for {
		x := v_0
		if v_1.Op != OpAnd32 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_1_1.AuxInt)
			if !(c&7 == 7) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft8 x (And16 y (Const16 [c])))
	// cond: c&7 == 7
	// result: (RotateLeft8 x y)
	for {
		x := v_0
		if v_1.Op != OpAnd16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_1_1.AuxInt)
			if !(c&7 == 7) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft8 x (And8 y (Const8 [c])))
	// cond: c&7 == 7
	// result: (RotateLeft8 x y)
	for {
		x := v_0
		if v_1.Op != OpAnd8 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_1_1.AuxInt)
			if !(c&7 == 7) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft8 x (Neg64 (And64 y (Const64 [c]))))
	// cond: c&7 == 7
	// result: (RotateLeft8 x (Neg64 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpNeg64 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpAnd64 {
			break
		}
		_ = v_1_0.Args[1]
		v_1_0_0 := v_1_0.Args[0]
		v_1_0_1 := v_1_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
			y := v_1_0_0
			if v_1_0_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_0_1.AuxInt)
			if !(c&7 == 7) {
				continue
			}
			v.reset(OpRotateLeft8)
			v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
			v0.AddArg(y)
			v.AddArg2(x, v0)
			return true
		}
		break
	}
	// match: (RotateLeft8 x (Neg32 (And32 y (Const32 [c]))))
	// cond: c&7 == 7
	// result: (RotateLeft8 x (Neg32 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpNeg32 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpAnd32 {
			break
		}
		_ = v_1_0.Args[1]
		v_1_0_0 := v_1_0.Args[0]
		v_1_0_1 := v_1_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
			y := v_1_0_0
			if v_1_0_1.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_1_0_1.AuxInt)
			if !(c&7 == 7) {
				continue
			}
			v.reset(OpRotateLeft8)
			v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
			v0.AddArg(y)
			v.AddArg2(x, v0)
			return true
		}
		break
	}
	// match: (RotateLeft8 x (Neg16 (And16 y (Const16 [c]))))
	// cond: c&7 == 7
	// result: (RotateLeft8 x (Neg16 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpNeg16 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpAnd16 {
			break
		}
		_ = v_1_0.Args[1]
		v_1_0_0 := v_1_0.Args[0]
		v_1_0_1 := v_1_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
			y := v_1_0_0
			if v_1_0_1.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_1_0_1.AuxInt)
			if !(c&7 == 7) {
				continue
			}
			v.reset(OpRotateLeft8)
			v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
			v0.AddArg(y)
			v.AddArg2(x, v0)
			return true
		}
		break
	}
	// match: (RotateLeft8 x (Neg8 (And8 y (Const8 [c]))))
	// cond: c&7 == 7
	// result: (RotateLeft8 x (Neg8 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpNeg8 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpAnd8 {
			break
		}
		_ = v_1_0.Args[1]
		v_1_0_0 := v_1_0.Args[0]
		v_1_0_1 := v_1_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
			y := v_1_0_0
			if v_1_0_1.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_1_0_1.AuxInt)
			if !(c&7 == 7) {
				continue
			}
			v.reset(OpRotateLeft8)
			v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
			v0.AddArg(y)
			v.AddArg2(x, v0)
			return true
		}
		break
	}
	// match: (RotateLeft8 x (Add64 y (Const64 [c])))
	// cond: c&7 == 0
	// result: (RotateLeft8 x y)
	for {
		x := v_0
		if v_1.Op != OpAdd64 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c&7 == 0) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft8 x (Add32 y (Const32 [c])))
	// cond: c&7 == 0
	// result: (RotateLeft8 x y)
	for {
		x := v_0
		if v_1.Op != OpAdd32 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_1_1.AuxInt)
			if !(c&7 == 0) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft8 x (Add16 y (Const16 [c])))
	// cond: c&7 == 0
	// result: (RotateLeft8 x y)
	for {
		x := v_0
		if v_1.Op != OpAdd16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_1_1.AuxInt)
			if !(c&7 == 0) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft8 x (Add8 y (Const8 [c])))
	// cond: c&7 == 0
	// result: (RotateLeft8 x y)
	for {
		x := v_0
		if v_1.Op != OpAdd8 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			y := v_1_0
			if v_1_1.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_1_1.AuxInt)
			if !(c&7 == 0) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (RotateLeft8 x (Sub64 (Const64 [c]) y))
	// cond: c&7 == 0
	// result: (RotateLeft8 x (Neg64 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpSub64 {
			break
		}
		y := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1_0.AuxInt)
		if !(c&7 == 0) {
			break
		}
		v.reset(OpRotateLeft8)
		v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft8 x (Sub32 (Const32 [c]) y))
	// cond: c&7 == 0
	// result: (RotateLeft8 x (Neg32 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpSub32 {
			break
		}
		y := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1_0.AuxInt)
		if !(c&7 == 0) {
			break
		}
		v.reset(OpRotateLeft8)
		v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft8 x (Sub16 (Const16 [c]) y))
	// cond: c&7 == 0
	// result: (RotateLeft8 x (Neg16 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpSub16 {
			break
		}
		y := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1_0.AuxInt)
		if !(c&7 == 0) {
			break
		}
		v.reset(OpRotateLeft8)
		v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft8 x (Sub8 (Const8 [c]) y))
	// cond: c&7 == 0
	// result: (RotateLeft8 x (Neg8 <y.Type> y))
	for {
		x := v_0
		if v_1.Op != OpSub8 {
			break
		}
		y := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1_0.AuxInt)
		if !(c&7 == 0) {
			break
		}
		v.reset(OpRotateLeft8)
		v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft8 x (Const64 <t> [c]))
	// cond: config.PtrSize == 4
	// result: (RotateLeft8 x (Const32 <t> [int32(c)]))
	for {
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := auxIntToInt64(v_1.AuxInt)
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpRotateLeft8)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft8 (RotateLeft8 x c) d)
	// cond: c.Type.Size() == 8 && d.Type.Size() == 8
	// result: (RotateLeft8 x (Add64 <c.Type> c d))
	for {
		if v_0.Op != OpRotateLeft8 {
			break
		}
		c := v_0.Args[1]
		x := v_0.Args[0]
		d := v_1
		if !(c.Type.Size() == 8 && d.Type.Size() == 8) {
			break
		}
		v.reset(OpRotateLeft8)
		v0 := b.NewValue0(v.Pos, OpAdd64, c.Type)
		v0.AddArg2(c, d)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft8 (RotateLeft8 x c) d)
	// cond: c.Type.Size() == 4 && d.Type.Size() == 4
	// result: (RotateLeft8 x (Add32 <c.Type> c d))
	for {
		if v_0.Op != OpRotateLeft8 {
			break
		}
		c := v_0.Args[1]
		x := v_0.Args[0]
		d := v_1
		if !(c.Type.Size() == 4 && d.Type.Size() == 4) {
			break
		}
		v.reset(OpRotateLeft8)
		v0 := b.NewValue0(v.Pos, OpAdd32, c.Type)
		v0.AddArg2(c, d)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft8 (RotateLeft8 x c) d)
	// cond: c.Type.Size() == 2 && d.Type.Size() == 2
	// result: (RotateLeft8 x (Add16 <c.Type> c d))
	for {
		if v_0.Op != OpRotateLeft8 {
			break
		}
		c := v_0.Args[1]
		x := v_0.Args[0]
		d := v_1
		if !(c.Type.Size() == 2 && d.Type.Size() == 2) {
			break
		}
		v.reset(OpRotateLeft8)
		v0 := b.NewValue0(v.Pos, OpAdd16, c.Type)
		v0.AddArg2(c, d)
		v.AddArg2(x, v0)
		return true
	}
	// match: (RotateLeft8 (RotateLeft8 x c) d)
	// cond: c.Type.Size() == 1 && d.Type.Size() == 1
	// result: (RotateLeft8 x (Add8 <c.Type> c d))
	for {
		if v_0.Op != OpRotateLeft8 {
			break
		}
		c := v_0.Args[1]
		x := v_0.Args[0]
		d := v_1
		if !(c.Type.Size() == 1 && d.Type.Size() == 1) {
			break
		}
		v.reset(OpRotateLeft8)
		v0 := b.NewValue0(v.Pos, OpAdd8, c.Type)
		v0.AddArg2(c, d)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRound32F folds a Round32F whose operand is already a
// Const32F down to that constant (rounding a 32-bit float constant is a
// no-op). It reports whether v was rewritten.
func rewriteValuegeneric_OpRound32F(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Round32F x:(Const32F))
	// result: x
	for {
		x := v_0
		if x.Op != OpConst32F {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRound64F folds a Round64F whose operand is already a
// Const64F down to that constant (rounding a 64-bit float constant is a
// no-op). It reports whether v was rewritten.
func rewriteValuegeneric_OpRound64F(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Round64F x:(Const64F))
	// result: x
	for {
		x := v_0
		if x.Op != OpConst64F {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRoundToEven constant-folds RoundToEven of a Const64F
// at compile time using math.RoundToEven. It reports whether v was rewritten.
func rewriteValuegeneric_OpRoundToEven(v *Value) bool {
	v_0 := v.Args[0]
	// match: (RoundToEven (Const64F [c]))
	// result: (Const64F [math.RoundToEven(c)])
	for {
		if v_0.Op != OpConst64F {
			break
		}
		c := auxIntToFloat64(v_0.AuxInt)
		v.reset(OpConst64F)
		v.AuxInt = float64ToAuxInt(math.RoundToEven(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh16Ux16 normalizes an unsigned 16-bit right shift
// with a 16-bit constant count to the canonical 64-bit-count form
// (Rsh16Ux64), and folds a shift of the constant zero to zero. It reports
// whether v was rewritten.
func rewriteValuegeneric_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh16Ux16 <t> x (Const16 [c]))
	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		v.reset(OpRsh16Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh16Ux16 (Const16 [0]) _)
	// result: (Const16 [0])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh16Ux32 normalizes an unsigned 16-bit right shift
// with a 32-bit constant count to the canonical 64-bit-count form
// (Rsh16Ux64), and folds a shift of the constant zero to zero. It reports
// whether v was rewritten.
func rewriteValuegeneric_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh16Ux32 <t> x (Const32 [c]))
	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		v.reset(OpRsh16Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh16Ux32 (Const16 [0]) _)
	// result: (Const16 [0])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh16Ux64 applies the generic rules for the
// canonical unsigned 16-bit right shift: constant folding, eliminating
// shifts by zero or of zero, turning over-wide shifts (count >= 16) into
// zero, merging consecutive shifts, simplifying sign-bit extraction and
// shift-then-mask patterns, and recognizing a left/right shift pair as a
// zero extension. It reports whether v was rewritten. Each rule carries its
// generated match/cond/result comment from _gen/generic.rules.
func rewriteValuegeneric_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 (Const16 [c]) (Const64 [d]))
	// result: (Const16 [int16(uint16(c) >> uint64(d))])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(int16(uint16(c) >> uint64(d)))
		return true
	}
	// match: (Rsh16Ux64 x (Const64 [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (Rsh16Ux64 (Const16 [0]) _)
	// result: (Const16 [0])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	// match: (Rsh16Ux64 _ (Const64 [c]))
	// cond: uint64(c) >= 16
	// result: (Const16 [0])
	for {
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 16) {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	// match: (Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh16Ux64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		if v_0.Op != OpRsh16Ux64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh16Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(c + d)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh16Ux64 (Rsh16x64 x _) (Const64 <t> [15]))
	// result: (Rsh16Ux64 x (Const64 <t> [15]))
	for {
		if v_0.Op != OpRsh16x64 {
			break
		}
		x := v_0.Args[0]
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		if auxIntToInt64(v_1.AuxInt) != 15 {
			break
		}
		v.reset(OpRsh16Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(15)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh16Ux64 i:(Lsh16x64 x (Const64 [c])) (Const64 [c]))
	// cond: c >= 0 && c < 16 && i.Uses == 1
	// result: (And16 x (Const16 <v.Type> [int16(^uint16(0)>>c)]))
	for {
		i := v_0
		if i.Op != OpLsh16x64 {
			break
		}
		_ = i.Args[1]
		x := i.Args[0]
		i_1 := i.Args[1]
		if i_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(i_1.AuxInt)
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 16 && i.Uses == 1) {
			break
		}
		v.reset(OpAnd16)
		v0 := b.NewValue0(v.Pos, OpConst16, v.Type)
		v0.AuxInt = int16ToAuxInt(int16(^uint16(0) >> c))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
	// result: (Rsh16Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
	for {
		if v_0.Op != OpLsh16x64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRsh16Ux64 {
			break
		}
		_ = v_0_0.Args[1]
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := auxIntToInt64(v_0_0_1.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		c3 := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
			break
		}
		v.reset(OpRsh16Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh16Ux64 (Lsh16x64 x (Const64 [8])) (Const64 [8]))
	// result: (ZeroExt8to16 (Trunc16to8 <typ.UInt8> x))
	for {
		if v_0.Op != OpLsh16x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 8 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 8 {
			break
		}
		v.reset(OpZeroExt8to16)
		v0 := b.NewValue0(v.Pos, OpTrunc16to8, typ.UInt8)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh16Ux8 normalizes an unsigned 16-bit right shift
// with an 8-bit constant count to the canonical 64-bit-count form
// (Rsh16Ux64), and folds a shift of the constant zero to zero. It reports
// whether v was rewritten.
func rewriteValuegeneric_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh16Ux8 <t> x (Const8 [c]))
	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		v.reset(OpRsh16Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh16Ux8 (Const16 [0]) _)
	// result: (Const16 [0])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh16x16 normalizes a signed 16-bit right shift with
// a 16-bit constant count to the canonical 64-bit-count form (Rsh16x64),
// and folds a shift of the constant zero to zero. It reports whether v was
// rewritten.
func rewriteValuegeneric_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh16x16 <t> x (Const16 [c]))
	// result: (Rsh16x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		v.reset(OpRsh16x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh16x16 (Const16 [0]) _)
	// result: (Const16 [0])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh16x32 normalizes a signed 16-bit right shift with
// a 32-bit constant count to the canonical 64-bit-count form (Rsh16x64),
// and folds a shift of the constant zero to zero. It reports whether v was
// rewritten.
func rewriteValuegeneric_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh16x32 <t> x (Const32 [c]))
	// result: (Rsh16x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		v.reset(OpRsh16x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh16x32 (Const16 [0]) _)
	// result: (Const16 [0])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh16x64 applies the generic rules for the canonical
// signed 16-bit right shift: constant folding, eliminating shifts by zero or
// of zero, merging consecutive signed shifts, and recognizing a left/right
// shift pair as a sign extension. It reports whether v was rewritten. Each
// rule carries its generated match/cond/result comment from
// _gen/generic.rules.
func rewriteValuegeneric_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 (Const16 [c]) (Const64 [d]))
	// result: (Const16 [c >> uint64(d)])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(c >> uint64(d))
		return true
	}
	// match: (Rsh16x64 x (Const64 [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (Rsh16x64 (Const16 [0]) _)
	// result: (Const16 [0])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	// match: (Rsh16x64 <t> (Rsh16x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh16x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		if v_0.Op != OpRsh16x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh16x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(c + d)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh16x64 (Lsh16x64 x (Const64 [8])) (Const64 [8]))
	// result: (SignExt8to16 (Trunc16to8 <typ.Int8> x))
	for {
		if v_0.Op != OpLsh16x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 8 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 8 {
			break
		}
		v.reset(OpSignExt8to16)
		v0 := b.NewValue0(v.Pos, OpTrunc16to8, typ.Int8)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh16x8 normalizes a signed 16-bit right shift with
// an 8-bit constant count to the canonical 64-bit-count form (Rsh16x64),
// and folds a shift of the constant zero to zero. It reports whether v was
// rewritten.
func rewriteValuegeneric_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh16x8 <t> x (Const8 [c]))
	// result: (Rsh16x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		v.reset(OpRsh16x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh16x8 (Const16 [0]) _)
	// result: (Const16 [0])
	for {
		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh32Ux16 normalizes an unsigned 32-bit right shift
// with a 16-bit constant count to the canonical 64-bit-count form
// (Rsh32Ux64), and folds a shift of the constant zero to zero. It reports
// whether v was rewritten.
func rewriteValuegeneric_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh32Ux16 <t> x (Const16 [c]))
	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		v.reset(OpRsh32Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32Ux16 (Const32 [0]) _)
	// result: (Const32 [0])
	for {
		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh32Ux32 normalizes an unsigned 32-bit right shift
// with a 32-bit constant count to the canonical 64-bit-count form
// (Rsh32Ux64), and folds a shift of the constant zero to zero. It reports
// whether v was rewritten.
func rewriteValuegeneric_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh32Ux32 <t> x (Const32 [c]))
	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		v.reset(OpRsh32Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32Ux32 (Const32 [0]) _)
	// result: (Const32 [0])
	for {
		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(0)
		return true
	}
	return false
}
26505func rewriteValuegeneric_OpRsh32Ux64(v *Value) bool {
26506	v_1 := v.Args[1]
26507	v_0 := v.Args[0]
26508	b := v.Block
26509	typ := &b.Func.Config.Types
26510	// match: (Rsh32Ux64 (Const32 [c]) (Const64 [d]))
26511	// result: (Const32 [int32(uint32(c) >> uint64(d))])
26512	for {
26513		if v_0.Op != OpConst32 {
26514			break
26515		}
26516		c := auxIntToInt32(v_0.AuxInt)
26517		if v_1.Op != OpConst64 {
26518			break
26519		}
26520		d := auxIntToInt64(v_1.AuxInt)
26521		v.reset(OpConst32)
26522		v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
26523		return true
26524	}
26525	// match: (Rsh32Ux64 x (Const64 [0]))
26526	// result: x
26527	for {
26528		x := v_0
26529		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
26530			break
26531		}
26532		v.copyOf(x)
26533		return true
26534	}
26535	// match: (Rsh32Ux64 (Const32 [0]) _)
26536	// result: (Const32 [0])
26537	for {
26538		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
26539			break
26540		}
26541		v.reset(OpConst32)
26542		v.AuxInt = int32ToAuxInt(0)
26543		return true
26544	}
26545	// match: (Rsh32Ux64 _ (Const64 [c]))
26546	// cond: uint64(c) >= 32
26547	// result: (Const32 [0])
26548	for {
26549		if v_1.Op != OpConst64 {
26550			break
26551		}
26552		c := auxIntToInt64(v_1.AuxInt)
26553		if !(uint64(c) >= 32) {
26554			break
26555		}
26556		v.reset(OpConst32)
26557		v.AuxInt = int32ToAuxInt(0)
26558		return true
26559	}
26560	// match: (Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d]))
26561	// cond: !uaddOvf(c,d)
26562	// result: (Rsh32Ux64 x (Const64 <t> [c+d]))
26563	for {
26564		t := v.Type
26565		if v_0.Op != OpRsh32Ux64 {
26566			break
26567		}
26568		_ = v_0.Args[1]
26569		x := v_0.Args[0]
26570		v_0_1 := v_0.Args[1]
26571		if v_0_1.Op != OpConst64 {
26572			break
26573		}
26574		c := auxIntToInt64(v_0_1.AuxInt)
26575		if v_1.Op != OpConst64 {
26576			break
26577		}
26578		d := auxIntToInt64(v_1.AuxInt)
26579		if !(!uaddOvf(c, d)) {
26580			break
26581		}
26582		v.reset(OpRsh32Ux64)
26583		v0 := b.NewValue0(v.Pos, OpConst64, t)
26584		v0.AuxInt = int64ToAuxInt(c + d)
26585		v.AddArg2(x, v0)
26586		return true
26587	}
26588	// match: (Rsh32Ux64 (Rsh32x64 x _) (Const64 <t> [31]))
26589	// result: (Rsh32Ux64 x (Const64 <t> [31]))
26590	for {
26591		if v_0.Op != OpRsh32x64 {
26592			break
26593		}
26594		x := v_0.Args[0]
26595		if v_1.Op != OpConst64 {
26596			break
26597		}
26598		t := v_1.Type
26599		if auxIntToInt64(v_1.AuxInt) != 31 {
26600			break
26601		}
26602		v.reset(OpRsh32Ux64)
26603		v0 := b.NewValue0(v.Pos, OpConst64, t)
26604		v0.AuxInt = int64ToAuxInt(31)
26605		v.AddArg2(x, v0)
26606		return true
26607	}
26608	// match: (Rsh32Ux64 i:(Lsh32x64 x (Const64 [c])) (Const64 [c]))
26609	// cond: c >= 0 && c < 32 && i.Uses == 1
26610	// result: (And32 x (Const32 <v.Type> [int32(^uint32(0)>>c)]))
26611	for {
26612		i := v_0
26613		if i.Op != OpLsh32x64 {
26614			break
26615		}
26616		_ = i.Args[1]
26617		x := i.Args[0]
26618		i_1 := i.Args[1]
26619		if i_1.Op != OpConst64 {
26620			break
26621		}
26622		c := auxIntToInt64(i_1.AuxInt)
26623		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 32 && i.Uses == 1) {
26624			break
26625		}
26626		v.reset(OpAnd32)
26627		v0 := b.NewValue0(v.Pos, OpConst32, v.Type)
26628		v0.AuxInt = int32ToAuxInt(int32(^uint32(0) >> c))
26629		v.AddArg2(x, v0)
26630		return true
26631	}
26632	// match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
26633	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
26634	// result: (Rsh32Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
26635	for {
26636		if v_0.Op != OpLsh32x64 {
26637			break
26638		}
26639		_ = v_0.Args[1]
26640		v_0_0 := v_0.Args[0]
26641		if v_0_0.Op != OpRsh32Ux64 {
26642			break
26643		}
26644		_ = v_0_0.Args[1]
26645		x := v_0_0.Args[0]
26646		v_0_0_1 := v_0_0.Args[1]
26647		if v_0_0_1.Op != OpConst64 {
26648			break
26649		}
26650		c1 := auxIntToInt64(v_0_0_1.AuxInt)
26651		v_0_1 := v_0.Args[1]
26652		if v_0_1.Op != OpConst64 {
26653			break
26654		}
26655		c2 := auxIntToInt64(v_0_1.AuxInt)
26656		if v_1.Op != OpConst64 {
26657			break
26658		}
26659		c3 := auxIntToInt64(v_1.AuxInt)
26660		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
26661			break
26662		}
26663		v.reset(OpRsh32Ux64)
26664		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
26665		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
26666		v.AddArg2(x, v0)
26667		return true
26668	}
26669	// match: (Rsh32Ux64 (Lsh32x64 x (Const64 [24])) (Const64 [24]))
26670	// result: (ZeroExt8to32 (Trunc32to8 <typ.UInt8> x))
26671	for {
26672		if v_0.Op != OpLsh32x64 {
26673			break
26674		}
26675		_ = v_0.Args[1]
26676		x := v_0.Args[0]
26677		v_0_1 := v_0.Args[1]
26678		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 24 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 24 {
26679			break
26680		}
26681		v.reset(OpZeroExt8to32)
26682		v0 := b.NewValue0(v.Pos, OpTrunc32to8, typ.UInt8)
26683		v0.AddArg(x)
26684		v.AddArg(v0)
26685		return true
26686	}
26687	// match: (Rsh32Ux64 (Lsh32x64 x (Const64 [16])) (Const64 [16]))
26688	// result: (ZeroExt16to32 (Trunc32to16 <typ.UInt16> x))
26689	for {
26690		if v_0.Op != OpLsh32x64 {
26691			break
26692		}
26693		_ = v_0.Args[1]
26694		x := v_0.Args[0]
26695		v_0_1 := v_0.Args[1]
26696		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 16 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 16 {
26697			break
26698		}
26699		v.reset(OpZeroExt16to32)
26700		v0 := b.NewValue0(v.Pos, OpTrunc32to16, typ.UInt16)
26701		v0.AddArg(x)
26702		v.AddArg(v0)
26703		return true
26704	}
26705	return false
26706}
26707func rewriteValuegeneric_OpRsh32Ux8(v *Value) bool {
26708	v_1 := v.Args[1]
26709	v_0 := v.Args[0]
26710	b := v.Block
26711	// match: (Rsh32Ux8 <t> x (Const8 [c]))
26712	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))]))
26713	for {
26714		t := v.Type
26715		x := v_0
26716		if v_1.Op != OpConst8 {
26717			break
26718		}
26719		c := auxIntToInt8(v_1.AuxInt)
26720		v.reset(OpRsh32Ux64)
26721		v0 := b.NewValue0(v.Pos, OpConst64, t)
26722		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
26723		v.AddArg2(x, v0)
26724		return true
26725	}
26726	// match: (Rsh32Ux8 (Const32 [0]) _)
26727	// result: (Const32 [0])
26728	for {
26729		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
26730			break
26731		}
26732		v.reset(OpConst32)
26733		v.AuxInt = int32ToAuxInt(0)
26734		return true
26735	}
26736	return false
26737}
26738func rewriteValuegeneric_OpRsh32x16(v *Value) bool {
26739	v_1 := v.Args[1]
26740	v_0 := v.Args[0]
26741	b := v.Block
26742	// match: (Rsh32x16 <t> x (Const16 [c]))
26743	// result: (Rsh32x64 x (Const64 <t> [int64(uint16(c))]))
26744	for {
26745		t := v.Type
26746		x := v_0
26747		if v_1.Op != OpConst16 {
26748			break
26749		}
26750		c := auxIntToInt16(v_1.AuxInt)
26751		v.reset(OpRsh32x64)
26752		v0 := b.NewValue0(v.Pos, OpConst64, t)
26753		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
26754		v.AddArg2(x, v0)
26755		return true
26756	}
26757	// match: (Rsh32x16 (Const32 [0]) _)
26758	// result: (Const32 [0])
26759	for {
26760		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
26761			break
26762		}
26763		v.reset(OpConst32)
26764		v.AuxInt = int32ToAuxInt(0)
26765		return true
26766	}
26767	return false
26768}
26769func rewriteValuegeneric_OpRsh32x32(v *Value) bool {
26770	v_1 := v.Args[1]
26771	v_0 := v.Args[0]
26772	b := v.Block
26773	// match: (Rsh32x32 <t> x (Const32 [c]))
26774	// result: (Rsh32x64 x (Const64 <t> [int64(uint32(c))]))
26775	for {
26776		t := v.Type
26777		x := v_0
26778		if v_1.Op != OpConst32 {
26779			break
26780		}
26781		c := auxIntToInt32(v_1.AuxInt)
26782		v.reset(OpRsh32x64)
26783		v0 := b.NewValue0(v.Pos, OpConst64, t)
26784		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
26785		v.AddArg2(x, v0)
26786		return true
26787	}
26788	// match: (Rsh32x32 (Const32 [0]) _)
26789	// result: (Const32 [0])
26790	for {
26791		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
26792			break
26793		}
26794		v.reset(OpConst32)
26795		v.AuxInt = int32ToAuxInt(0)
26796		return true
26797	}
26798	return false
26799}
// rewriteValuegeneric_OpRsh32x64 applies generic rewrite rules to Rsh32x64
// (signed 32-bit right shift by a 64-bit count) and reports whether one fired.
// Each "for { ... break }" below is a one-shot attempt at a single rule; the
// rules are tried in order and the first match wins.
func rewriteValuegeneric_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// Constant-fold a shift of a constant by a constant.
	// match: (Rsh32x64 (Const32 [c]) (Const64 [d]))
	// result: (Const32 [c >> uint64(d)])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(c >> uint64(d))
		return true
	}
	// A shift by zero is the identity.
	// match: (Rsh32x64 x (Const64 [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// Shifting the zero constant yields zero regardless of the count.
	// match: (Rsh32x64 (Const32 [0]) _)
	// result: (Const32 [0])
	for {
		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(0)
		return true
	}
	// Fold two stacked signed right shifts into one, provided the
	// combined count does not overflow (checked via uaddOvf).
	// match: (Rsh32x64 <t> (Rsh32x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh32x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		if v_0.Op != OpRsh32x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh32x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(c + d)
		v.AddArg2(x, v0)
		return true
	}
	// Recognize the shift-left-then-arithmetic-shift-right idiom for
	// sign extension of the low 8 bits.
	// match: (Rsh32x64 (Lsh32x64 x (Const64 [24])) (Const64 [24]))
	// result: (SignExt8to32 (Trunc32to8 <typ.Int8> x))
	for {
		if v_0.Op != OpLsh32x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 24 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 24 {
			break
		}
		v.reset(OpSignExt8to32)
		v0 := b.NewValue0(v.Pos, OpTrunc32to8, typ.Int8)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// Same idiom for sign extension of the low 16 bits.
	// match: (Rsh32x64 (Lsh32x64 x (Const64 [16])) (Const64 [16]))
	// result: (SignExt16to32 (Trunc32to16 <typ.Int16> x))
	for {
		if v_0.Op != OpLsh32x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 16 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 16 {
			break
		}
		v.reset(OpSignExt16to32)
		v0 := b.NewValue0(v.Pos, OpTrunc32to16, typ.Int16)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
26906func rewriteValuegeneric_OpRsh32x8(v *Value) bool {
26907	v_1 := v.Args[1]
26908	v_0 := v.Args[0]
26909	b := v.Block
26910	// match: (Rsh32x8 <t> x (Const8 [c]))
26911	// result: (Rsh32x64 x (Const64 <t> [int64(uint8(c))]))
26912	for {
26913		t := v.Type
26914		x := v_0
26915		if v_1.Op != OpConst8 {
26916			break
26917		}
26918		c := auxIntToInt8(v_1.AuxInt)
26919		v.reset(OpRsh32x64)
26920		v0 := b.NewValue0(v.Pos, OpConst64, t)
26921		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
26922		v.AddArg2(x, v0)
26923		return true
26924	}
26925	// match: (Rsh32x8 (Const32 [0]) _)
26926	// result: (Const32 [0])
26927	for {
26928		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
26929			break
26930		}
26931		v.reset(OpConst32)
26932		v.AuxInt = int32ToAuxInt(0)
26933		return true
26934	}
26935	return false
26936}
26937func rewriteValuegeneric_OpRsh64Ux16(v *Value) bool {
26938	v_1 := v.Args[1]
26939	v_0 := v.Args[0]
26940	b := v.Block
26941	// match: (Rsh64Ux16 <t> x (Const16 [c]))
26942	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))]))
26943	for {
26944		t := v.Type
26945		x := v_0
26946		if v_1.Op != OpConst16 {
26947			break
26948		}
26949		c := auxIntToInt16(v_1.AuxInt)
26950		v.reset(OpRsh64Ux64)
26951		v0 := b.NewValue0(v.Pos, OpConst64, t)
26952		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
26953		v.AddArg2(x, v0)
26954		return true
26955	}
26956	// match: (Rsh64Ux16 (Const64 [0]) _)
26957	// result: (Const64 [0])
26958	for {
26959		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
26960			break
26961		}
26962		v.reset(OpConst64)
26963		v.AuxInt = int64ToAuxInt(0)
26964		return true
26965	}
26966	return false
26967}
26968func rewriteValuegeneric_OpRsh64Ux32(v *Value) bool {
26969	v_1 := v.Args[1]
26970	v_0 := v.Args[0]
26971	b := v.Block
26972	// match: (Rsh64Ux32 <t> x (Const32 [c]))
26973	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))]))
26974	for {
26975		t := v.Type
26976		x := v_0
26977		if v_1.Op != OpConst32 {
26978			break
26979		}
26980		c := auxIntToInt32(v_1.AuxInt)
26981		v.reset(OpRsh64Ux64)
26982		v0 := b.NewValue0(v.Pos, OpConst64, t)
26983		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
26984		v.AddArg2(x, v0)
26985		return true
26986	}
26987	// match: (Rsh64Ux32 (Const64 [0]) _)
26988	// result: (Const64 [0])
26989	for {
26990		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
26991			break
26992		}
26993		v.reset(OpConst64)
26994		v.AuxInt = int64ToAuxInt(0)
26995		return true
26996	}
26997	return false
26998}
// rewriteValuegeneric_OpRsh64Ux64 applies generic rewrite rules to Rsh64Ux64
// (unsigned 64-bit right shift by a 64-bit count) and reports whether one
// fired. Each "for { ... break }" below is a one-shot attempt at a single
// rule; the rules are tried in order and the first match wins.
func rewriteValuegeneric_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// Constant-fold a shift of a constant by a constant.
	// match: (Rsh64Ux64 (Const64 [c]) (Const64 [d]))
	// result: (Const64 [int64(uint64(c) >> uint64(d))])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
		return true
	}
	// A shift by zero is the identity.
	// match: (Rsh64Ux64 x (Const64 [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// Shifting the zero constant yields zero regardless of the count.
	// match: (Rsh64Ux64 (Const64 [0]) _)
	// result: (Const64 [0])
	for {
		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// An unsigned shift by the full width or more produces zero.
	// match: (Rsh64Ux64 _ (Const64 [c]))
	// cond: uint64(c) >= 64
	// result: (Const64 [0])
	for {
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// Fold two stacked unsigned right shifts into one, provided the
	// combined count does not overflow (checked via uaddOvf).
	// match: (Rsh64Ux64 <t> (Rsh64Ux64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh64Ux64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		if v_0.Op != OpRsh64Ux64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(c + d)
		v.AddArg2(x, v0)
		return true
	}
	// After shifting right by 63 only the top bit remains, so the inner
	// shift's signedness is irrelevant and can be normalized to unsigned.
	// match: (Rsh64Ux64 (Rsh64x64 x _) (Const64 <t> [63]))
	// result: (Rsh64Ux64 x (Const64 <t> [63]))
	for {
		if v_0.Op != OpRsh64x64 {
			break
		}
		x := v_0.Args[0]
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		if auxIntToInt64(v_1.AuxInt) != 63 {
			break
		}
		v.reset(OpRsh64Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(63)
		v.AddArg2(x, v0)
		return true
	}
	// Shift-left-then-shift-right by the same count masks off the high
	// bits; emit the equivalent AND with a constant mask instead.
	// match: (Rsh64Ux64 i:(Lsh64x64 x (Const64 [c])) (Const64 [c]))
	// cond: c >= 0 && c < 64 && i.Uses == 1
	// result: (And64 x (Const64 <v.Type> [int64(^uint64(0)>>c)]))
	for {
		i := v_0
		if i.Op != OpLsh64x64 {
			break
		}
		_ = i.Args[1]
		x := i.Args[0]
		i_1 := i.Args[1]
		if i_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(i_1.AuxInt)
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 64 && i.Uses == 1) {
			break
		}
		v.reset(OpAnd64)
		v0 := b.NewValue0(v.Pos, OpConst64, v.Type)
		v0.AuxInt = int64ToAuxInt(int64(^uint64(0) >> c))
		v.AddArg2(x, v0)
		return true
	}
	// Collapse a right-left-right shift sequence into one right shift
	// when the intermediate counts line up.
	// match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
	// result: (Rsh64Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
	for {
		if v_0.Op != OpLsh64x64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRsh64Ux64 {
			break
		}
		_ = v_0_0.Args[1]
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := auxIntToInt64(v_0_0_1.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		c3 := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
		v.AddArg2(x, v0)
		return true
	}
	// Recognize the shift-left-then-logical-shift-right idiom for zero
	// extension of the low 8 bits.
	// match: (Rsh64Ux64 (Lsh64x64 x (Const64 [56])) (Const64 [56]))
	// result: (ZeroExt8to64 (Trunc64to8 <typ.UInt8> x))
	for {
		if v_0.Op != OpLsh64x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 56 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 56 {
			break
		}
		v.reset(OpZeroExt8to64)
		v0 := b.NewValue0(v.Pos, OpTrunc64to8, typ.UInt8)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// Same idiom for zero extension of the low 16 bits.
	// match: (Rsh64Ux64 (Lsh64x64 x (Const64 [48])) (Const64 [48]))
	// result: (ZeroExt16to64 (Trunc64to16 <typ.UInt16> x))
	for {
		if v_0.Op != OpLsh64x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 48 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 48 {
			break
		}
		v.reset(OpZeroExt16to64)
		v0 := b.NewValue0(v.Pos, OpTrunc64to16, typ.UInt16)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// Same idiom for zero extension of the low 32 bits.
	// match: (Rsh64Ux64 (Lsh64x64 x (Const64 [32])) (Const64 [32]))
	// result: (ZeroExt32to64 (Trunc64to32 <typ.UInt32> x))
	for {
		if v_0.Op != OpLsh64x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 32 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 32 {
			break
		}
		v.reset(OpZeroExt32to64)
		v0 := b.NewValue0(v.Pos, OpTrunc64to32, typ.UInt32)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
27219func rewriteValuegeneric_OpRsh64Ux8(v *Value) bool {
27220	v_1 := v.Args[1]
27221	v_0 := v.Args[0]
27222	b := v.Block
27223	// match: (Rsh64Ux8 <t> x (Const8 [c]))
27224	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint8(c))]))
27225	for {
27226		t := v.Type
27227		x := v_0
27228		if v_1.Op != OpConst8 {
27229			break
27230		}
27231		c := auxIntToInt8(v_1.AuxInt)
27232		v.reset(OpRsh64Ux64)
27233		v0 := b.NewValue0(v.Pos, OpConst64, t)
27234		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
27235		v.AddArg2(x, v0)
27236		return true
27237	}
27238	// match: (Rsh64Ux8 (Const64 [0]) _)
27239	// result: (Const64 [0])
27240	for {
27241		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
27242			break
27243		}
27244		v.reset(OpConst64)
27245		v.AuxInt = int64ToAuxInt(0)
27246		return true
27247	}
27248	return false
27249}
27250func rewriteValuegeneric_OpRsh64x16(v *Value) bool {
27251	v_1 := v.Args[1]
27252	v_0 := v.Args[0]
27253	b := v.Block
27254	// match: (Rsh64x16 <t> x (Const16 [c]))
27255	// result: (Rsh64x64 x (Const64 <t> [int64(uint16(c))]))
27256	for {
27257		t := v.Type
27258		x := v_0
27259		if v_1.Op != OpConst16 {
27260			break
27261		}
27262		c := auxIntToInt16(v_1.AuxInt)
27263		v.reset(OpRsh64x64)
27264		v0 := b.NewValue0(v.Pos, OpConst64, t)
27265		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
27266		v.AddArg2(x, v0)
27267		return true
27268	}
27269	// match: (Rsh64x16 (Const64 [0]) _)
27270	// result: (Const64 [0])
27271	for {
27272		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
27273			break
27274		}
27275		v.reset(OpConst64)
27276		v.AuxInt = int64ToAuxInt(0)
27277		return true
27278	}
27279	return false
27280}
27281func rewriteValuegeneric_OpRsh64x32(v *Value) bool {
27282	v_1 := v.Args[1]
27283	v_0 := v.Args[0]
27284	b := v.Block
27285	// match: (Rsh64x32 <t> x (Const32 [c]))
27286	// result: (Rsh64x64 x (Const64 <t> [int64(uint32(c))]))
27287	for {
27288		t := v.Type
27289		x := v_0
27290		if v_1.Op != OpConst32 {
27291			break
27292		}
27293		c := auxIntToInt32(v_1.AuxInt)
27294		v.reset(OpRsh64x64)
27295		v0 := b.NewValue0(v.Pos, OpConst64, t)
27296		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
27297		v.AddArg2(x, v0)
27298		return true
27299	}
27300	// match: (Rsh64x32 (Const64 [0]) _)
27301	// result: (Const64 [0])
27302	for {
27303		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
27304			break
27305		}
27306		v.reset(OpConst64)
27307		v.AuxInt = int64ToAuxInt(0)
27308		return true
27309	}
27310	return false
27311}
// rewriteValuegeneric_OpRsh64x64 applies generic rewrite rules to Rsh64x64
// (signed 64-bit right shift by a 64-bit count) and reports whether one fired.
// Each "for { ... break }" below is a one-shot attempt at a single rule; the
// rules are tried in order and the first match wins.
func rewriteValuegeneric_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// Constant-fold a shift of a constant by a constant.
	// match: (Rsh64x64 (Const64 [c]) (Const64 [d]))
	// result: (Const64 [c >> uint64(d)])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(c >> uint64(d))
		return true
	}
	// A shift by zero is the identity.
	// match: (Rsh64x64 x (Const64 [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// Shifting the zero constant yields zero regardless of the count.
	// match: (Rsh64x64 (Const64 [0]) _)
	// result: (Const64 [0])
	for {
		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// Fold two stacked signed right shifts into one, provided the
	// combined count does not overflow (checked via uaddOvf).
	// match: (Rsh64x64 <t> (Rsh64x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh64x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		if v_0.Op != OpRsh64x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh64x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(c + d)
		v.AddArg2(x, v0)
		return true
	}
	// Recognize the shift-left-then-arithmetic-shift-right idiom for
	// sign extension of the low 8 bits.
	// match: (Rsh64x64 (Lsh64x64 x (Const64 [56])) (Const64 [56]))
	// result: (SignExt8to64 (Trunc64to8 <typ.Int8> x))
	for {
		if v_0.Op != OpLsh64x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 56 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 56 {
			break
		}
		v.reset(OpSignExt8to64)
		v0 := b.NewValue0(v.Pos, OpTrunc64to8, typ.Int8)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// Same idiom for sign extension of the low 16 bits.
	// match: (Rsh64x64 (Lsh64x64 x (Const64 [48])) (Const64 [48]))
	// result: (SignExt16to64 (Trunc64to16 <typ.Int16> x))
	for {
		if v_0.Op != OpLsh64x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 48 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 48 {
			break
		}
		v.reset(OpSignExt16to64)
		v0 := b.NewValue0(v.Pos, OpTrunc64to16, typ.Int16)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// Same idiom for sign extension of the low 32 bits.
	// match: (Rsh64x64 (Lsh64x64 x (Const64 [32])) (Const64 [32]))
	// result: (SignExt32to64 (Trunc64to32 <typ.Int32> x))
	for {
		if v_0.Op != OpLsh64x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 32 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 32 {
			break
		}
		v.reset(OpSignExt32to64)
		v0 := b.NewValue0(v.Pos, OpTrunc64to32, typ.Int32)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
27436func rewriteValuegeneric_OpRsh64x8(v *Value) bool {
27437	v_1 := v.Args[1]
27438	v_0 := v.Args[0]
27439	b := v.Block
27440	// match: (Rsh64x8 <t> x (Const8 [c]))
27441	// result: (Rsh64x64 x (Const64 <t> [int64(uint8(c))]))
27442	for {
27443		t := v.Type
27444		x := v_0
27445		if v_1.Op != OpConst8 {
27446			break
27447		}
27448		c := auxIntToInt8(v_1.AuxInt)
27449		v.reset(OpRsh64x64)
27450		v0 := b.NewValue0(v.Pos, OpConst64, t)
27451		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
27452		v.AddArg2(x, v0)
27453		return true
27454	}
27455	// match: (Rsh64x8 (Const64 [0]) _)
27456	// result: (Const64 [0])
27457	for {
27458		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
27459			break
27460		}
27461		v.reset(OpConst64)
27462		v.AuxInt = int64ToAuxInt(0)
27463		return true
27464	}
27465	return false
27466}
27467func rewriteValuegeneric_OpRsh8Ux16(v *Value) bool {
27468	v_1 := v.Args[1]
27469	v_0 := v.Args[0]
27470	b := v.Block
27471	// match: (Rsh8Ux16 <t> x (Const16 [c]))
27472	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))]))
27473	for {
27474		t := v.Type
27475		x := v_0
27476		if v_1.Op != OpConst16 {
27477			break
27478		}
27479		c := auxIntToInt16(v_1.AuxInt)
27480		v.reset(OpRsh8Ux64)
27481		v0 := b.NewValue0(v.Pos, OpConst64, t)
27482		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
27483		v.AddArg2(x, v0)
27484		return true
27485	}
27486	// match: (Rsh8Ux16 (Const8 [0]) _)
27487	// result: (Const8 [0])
27488	for {
27489		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
27490			break
27491		}
27492		v.reset(OpConst8)
27493		v.AuxInt = int8ToAuxInt(0)
27494		return true
27495	}
27496	return false
27497}
27498func rewriteValuegeneric_OpRsh8Ux32(v *Value) bool {
27499	v_1 := v.Args[1]
27500	v_0 := v.Args[0]
27501	b := v.Block
27502	// match: (Rsh8Ux32 <t> x (Const32 [c]))
27503	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))]))
27504	for {
27505		t := v.Type
27506		x := v_0
27507		if v_1.Op != OpConst32 {
27508			break
27509		}
27510		c := auxIntToInt32(v_1.AuxInt)
27511		v.reset(OpRsh8Ux64)
27512		v0 := b.NewValue0(v.Pos, OpConst64, t)
27513		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
27514		v.AddArg2(x, v0)
27515		return true
27516	}
27517	// match: (Rsh8Ux32 (Const8 [0]) _)
27518	// result: (Const8 [0])
27519	for {
27520		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
27521			break
27522		}
27523		v.reset(OpConst8)
27524		v.AuxInt = int8ToAuxInt(0)
27525		return true
27526	}
27527	return false
27528}
// rewriteValuegeneric_OpRsh8Ux64 applies generic rewrite rules to Rsh8Ux64
// (unsigned 8-bit right shift by a 64-bit count) and reports whether one
// fired. Each "for { ... break }" below is a one-shot attempt at a single
// rule; the rules are tried in order and the first match wins.
func rewriteValuegeneric_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// Constant-fold a shift of a constant by a constant.
	// match: (Rsh8Ux64 (Const8 [c]) (Const64 [d]))
	// result: (Const8 [int8(uint8(c) >> uint64(d))])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(int8(uint8(c) >> uint64(d)))
		return true
	}
	// A shift by zero is the identity.
	// match: (Rsh8Ux64 x (Const64 [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// Shifting the zero constant yields zero regardless of the count.
	// match: (Rsh8Ux64 (Const8 [0]) _)
	// result: (Const8 [0])
	for {
		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(0)
		return true
	}
	// An unsigned shift by the full width or more produces zero.
	// match: (Rsh8Ux64 _ (Const64 [c]))
	// cond: uint64(c) >= 8
	// result: (Const8 [0])
	for {
		if v_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 8) {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(0)
		return true
	}
	// Fold two stacked unsigned right shifts into one, provided the
	// combined count does not overflow (checked via uaddOvf).
	// match: (Rsh8Ux64 <t> (Rsh8Ux64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh8Ux64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		if v_0.Op != OpRsh8Ux64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh8Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(c + d)
		v.AddArg2(x, v0)
		return true
	}
	// After shifting right by 7 only the top bit remains, so the inner
	// shift's signedness is irrelevant and can be normalized to unsigned.
	// match: (Rsh8Ux64 (Rsh8x64 x _) (Const64 <t> [7] ))
	// result: (Rsh8Ux64 x (Const64 <t> [7] ))
	for {
		if v_0.Op != OpRsh8x64 {
			break
		}
		x := v_0.Args[0]
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		if auxIntToInt64(v_1.AuxInt) != 7 {
			break
		}
		v.reset(OpRsh8Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(7)
		v.AddArg2(x, v0)
		return true
	}
	// Shift-left-then-shift-right by the same count masks off the high
	// bits; emit the equivalent AND with a constant mask instead.
	// match: (Rsh8Ux64 i:(Lsh8x64 x (Const64 [c])) (Const64 [c]))
	// cond: c >= 0 && c < 8 && i.Uses == 1
	// result: (And8 x (Const8 <v.Type> [int8 (^uint8 (0)>>c)]))
	for {
		i := v_0
		if i.Op != OpLsh8x64 {
			break
		}
		_ = i.Args[1]
		x := i.Args[0]
		i_1 := i.Args[1]
		if i_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(i_1.AuxInt)
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 8 && i.Uses == 1) {
			break
		}
		v.reset(OpAnd8)
		v0 := b.NewValue0(v.Pos, OpConst8, v.Type)
		v0.AuxInt = int8ToAuxInt(int8(^uint8(0) >> c))
		v.AddArg2(x, v0)
		return true
	}
	// Collapse a right-left-right shift sequence into one right shift
	// when the intermediate counts line up.
	// match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
	// result: (Rsh8Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
	for {
		if v_0.Op != OpLsh8x64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRsh8Ux64 {
			break
		}
		_ = v_0_0.Args[1]
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := auxIntToInt64(v_0_0_1.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		c3 := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
			break
		}
		v.reset(OpRsh8Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
27695func rewriteValuegeneric_OpRsh8Ux8(v *Value) bool {
27696	v_1 := v.Args[1]
27697	v_0 := v.Args[0]
27698	b := v.Block
27699	// match: (Rsh8Ux8 <t> x (Const8 [c]))
27700	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))]))
27701	for {
27702		t := v.Type
27703		x := v_0
27704		if v_1.Op != OpConst8 {
27705			break
27706		}
27707		c := auxIntToInt8(v_1.AuxInt)
27708		v.reset(OpRsh8Ux64)
27709		v0 := b.NewValue0(v.Pos, OpConst64, t)
27710		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
27711		v.AddArg2(x, v0)
27712		return true
27713	}
27714	// match: (Rsh8Ux8 (Const8 [0]) _)
27715	// result: (Const8 [0])
27716	for {
27717		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
27718			break
27719		}
27720		v.reset(OpConst8)
27721		v.AuxInt = int8ToAuxInt(0)
27722		return true
27723	}
27724	return false
27725}
27726func rewriteValuegeneric_OpRsh8x16(v *Value) bool {
27727	v_1 := v.Args[1]
27728	v_0 := v.Args[0]
27729	b := v.Block
27730	// match: (Rsh8x16 <t> x (Const16 [c]))
27731	// result: (Rsh8x64 x (Const64 <t> [int64(uint16(c))]))
27732	for {
27733		t := v.Type
27734		x := v_0
27735		if v_1.Op != OpConst16 {
27736			break
27737		}
27738		c := auxIntToInt16(v_1.AuxInt)
27739		v.reset(OpRsh8x64)
27740		v0 := b.NewValue0(v.Pos, OpConst64, t)
27741		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
27742		v.AddArg2(x, v0)
27743		return true
27744	}
27745	// match: (Rsh8x16 (Const8 [0]) _)
27746	// result: (Const8 [0])
27747	for {
27748		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
27749			break
27750		}
27751		v.reset(OpConst8)
27752		v.AuxInt = int8ToAuxInt(0)
27753		return true
27754	}
27755	return false
27756}
// rewriteValuegeneric_OpRsh8x32 applies generic rewrite rules to an OpRsh8x32
// (signed 8-bit right shift with a 32-bit shift count), normalizing a constant
// shift count to 64 bits and folding a zero shiftee. It reports whether v was
// rewritten. Generated from _gen/generic.rules.
func rewriteValuegeneric_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh8x32 <t> x (Const32 [c]))
	// result: (Rsh8x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_1.AuxInt)
		v.reset(OpRsh8x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh8x32 (Const8 [0]) _)
	// result: (Const8 [0])
	for {
		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh8x64 applies generic rewrite rules to an OpRsh8x64
// (signed 8-bit right shift with a 64-bit shift count): constant folding,
// shift-by-zero elimination, zero-shiftee folding, and merging of nested
// shifts whose counts don't overflow when added (uaddOvf guard). It reports
// whether v was rewritten. Generated from _gen/generic.rules.
func rewriteValuegeneric_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh8x64 (Const8 [c]) (Const64 [d]))
	// result: (Const8 [c >> uint64(d)])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(c >> uint64(d))
		return true
	}
	// match: (Rsh8x64 x (Const64 [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (Rsh8x64 (Const8 [0]) _)
	// result: (Const8 [0])
	for {
		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(0)
		return true
	}
	// match: (Rsh8x64 <t> (Rsh8x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh8x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		if v_0.Op != OpRsh8x64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh8x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(c + d)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh8x8 applies generic rewrite rules to an OpRsh8x8
// (signed 8-bit right shift with an 8-bit shift count), normalizing a constant
// shift count to 64 bits and folding a zero shiftee. It reports whether v was
// rewritten. Generated from _gen/generic.rules.
func rewriteValuegeneric_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh8x8 <t> x (Const8 [c]))
	// result: (Rsh8x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_1.AuxInt)
		v.reset(OpRsh8x64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh8x8 (Const8 [0]) _)
	// result: (Const8 [0])
	for {
		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpSelect0 applies generic rewrite rules to an OpSelect0
// value (the first result of a multi-result op): it simplifies 128-bit
// division with a zero high word and folds multiply-with-overflow by the
// constants 0 and 1. The inner _i0 loops try both argument orders for
// commutative matches. It reports whether v was rewritten. Generated from
// _gen/generic.rules.
func rewriteValuegeneric_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Select0 (Div128u (Const64 [0]) lo y))
	// result: (Div64u lo y)
	for {
		if v_0.Op != OpDiv128u {
			break
		}
		y := v_0.Args[2]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 0 {
			break
		}
		lo := v_0.Args[1]
		v.reset(OpDiv64u)
		v.AddArg2(lo, y)
		return true
	}
	// match: (Select0 (Mul32uover (Const32 [1]) x))
	// result: x
	for {
		if v_0.Op != OpMul32uover {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != 1 {
				continue
			}
			x := v_0_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Select0 (Mul64uover (Const64 [1]) x))
	// result: x
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 1 {
				continue
			}
			x := v_0_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Select0 (Mul64uover (Const64 [0]) x))
	// result: (Const64 [0])
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (Select0 (Mul32uover (Const32 [0]) x))
	// result: (Const32 [0])
	for {
		if v_0.Op != OpMul32uover {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(0)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpSelect1 applies generic rewrite rules to an OpSelect1
// value (the second result of a multi-result op): it simplifies the remainder
// of a 128-bit division with a zero high word, and folds the overflow flag of
// multiply-with-overflow by 0 or 1 to false (such multiplies can never
// overflow). The inner _i0 loops try both argument orders for commutative
// matches. It reports whether v was rewritten. Generated from
// _gen/generic.rules.
func rewriteValuegeneric_OpSelect1(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Select1 (Div128u (Const64 [0]) lo y))
	// result: (Mod64u lo y)
	for {
		if v_0.Op != OpDiv128u {
			break
		}
		y := v_0.Args[2]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 0 {
			break
		}
		lo := v_0.Args[1]
		v.reset(OpMod64u)
		v.AddArg2(lo, y)
		return true
	}
	// match: (Select1 (Mul32uover (Const32 [1]) x))
	// result: (ConstBool [false])
	for {
		if v_0.Op != OpMul32uover {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != 1 {
				continue
			}
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(false)
			return true
		}
		break
	}
	// match: (Select1 (Mul64uover (Const64 [1]) x))
	// result: (ConstBool [false])
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 1 {
				continue
			}
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(false)
			return true
		}
		break
	}
	// match: (Select1 (Mul64uover (Const64 [0]) x))
	// result: (ConstBool [false])
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(false)
			return true
		}
		break
	}
	// match: (Select1 (Mul32uover (Const32 [0]) x))
	// result: (ConstBool [false])
	for {
		if v_0.Op != OpMul32uover {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConstBool)
			v.AuxInt = boolToAuxInt(false)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpSelectN applies generic rewrite rules to an OpSelectN
// value (the Nth result of a multi-result op). The rules: project results
// directly out of MakeResult; lower single-use runtime.memclrNoHeapPointers
// and runtime.memmove calls with constant, inlinable sizes to Zero/Move ops
// (covering both register-ABI StaticLECall and stack-ABI StaticCall argument
// shapes); drop race-detector cleanup calls when needRaceCleanup allows;
// recover the new length from runtime.growslice results; and deduplicate
// paired runtime.cmpstring calls. The cond comments on each rule state the
// Uses/clobber requirements that make each replacement safe. It reports
// whether v was rewritten. Generated from _gen/generic.rules.
func rewriteValuegeneric_OpSelectN(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (SelectN [0] (MakeResult x ___))
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpMakeResult || len(v_0.Args) < 1 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (SelectN [1] (MakeResult x y ___))
	// result: y
	for {
		if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpMakeResult || len(v_0.Args) < 2 {
			break
		}
		y := v_0.Args[1]
		v.copyOf(y)
		return true
	}
	// match: (SelectN [2] (MakeResult x y z ___))
	// result: z
	for {
		if auxIntToInt64(v.AuxInt) != 2 || v_0.Op != OpMakeResult || len(v_0.Args) < 3 {
			break
		}
		z := v_0.Args[2]
		v.copyOf(z)
		return true
	}
	// match: (SelectN [0] call:(StaticCall {sym} sptr (Const64 [c]) mem))
	// cond: isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)
	// result: (Zero {types.Types[types.TUINT8]} [int64(c)] sptr mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpStaticCall || len(call.Args) != 3 {
			break
		}
		sym := auxToCall(call.Aux)
		mem := call.Args[2]
		sptr := call.Args[0]
		call_1 := call.Args[1]
		if call_1.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(call_1.AuxInt)
		if !(isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)) {
			break
		}
		v.reset(OpZero)
		v.AuxInt = int64ToAuxInt(int64(c))
		v.Aux = typeToAux(types.Types[types.TUINT8])
		v.AddArg2(sptr, mem)
		return true
	}
	// match: (SelectN [0] call:(StaticCall {sym} sptr (Const32 [c]) mem))
	// cond: isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)
	// result: (Zero {types.Types[types.TUINT8]} [int64(c)] sptr mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpStaticCall || len(call.Args) != 3 {
			break
		}
		sym := auxToCall(call.Aux)
		mem := call.Args[2]
		sptr := call.Args[0]
		call_1 := call.Args[1]
		if call_1.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(call_1.AuxInt)
		if !(isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)) {
			break
		}
		v.reset(OpZero)
		v.AuxInt = int64ToAuxInt(int64(c))
		v.Aux = typeToAux(types.Types[types.TUINT8])
		v.AddArg2(sptr, mem)
		return true
	}
	// match: (SelectN [0] call:(StaticCall {sym} s1:(Store _ (Const64 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem)))))
	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)
	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpStaticCall || len(call.Args) != 1 {
			break
		}
		sym := auxToCall(call.Aux)
		s1 := call.Args[0]
		if s1.Op != OpStore {
			break
		}
		_ = s1.Args[2]
		s1_1 := s1.Args[1]
		if s1_1.Op != OpConst64 {
			break
		}
		sz := auxIntToInt64(s1_1.AuxInt)
		s2 := s1.Args[2]
		if s2.Op != OpStore {
			break
		}
		_ = s2.Args[2]
		src := s2.Args[1]
		s3 := s2.Args[2]
		if s3.Op != OpStore {
			break
		}
		mem := s3.Args[2]
		dst := s3.Args[1]
		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(int64(sz))
		v.Aux = typeToAux(types.Types[types.TUINT8])
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (SelectN [0] call:(StaticCall {sym} s1:(Store _ (Const32 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem)))))
	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)
	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpStaticCall || len(call.Args) != 1 {
			break
		}
		sym := auxToCall(call.Aux)
		s1 := call.Args[0]
		if s1.Op != OpStore {
			break
		}
		_ = s1.Args[2]
		s1_1 := s1.Args[1]
		if s1_1.Op != OpConst32 {
			break
		}
		sz := auxIntToInt32(s1_1.AuxInt)
		s2 := s1.Args[2]
		if s2.Op != OpStore {
			break
		}
		_ = s2.Args[2]
		src := s2.Args[1]
		s3 := s2.Args[2]
		if s3.Op != OpStore {
			break
		}
		mem := s3.Args[2]
		dst := s3.Args[1]
		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(int64(sz))
		v.Aux = typeToAux(types.Types[types.TUINT8])
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (SelectN [0] call:(StaticCall {sym} dst src (Const64 [sz]) mem))
	// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpStaticCall || len(call.Args) != 4 {
			break
		}
		sym := auxToCall(call.Aux)
		mem := call.Args[3]
		dst := call.Args[0]
		src := call.Args[1]
		call_2 := call.Args[2]
		if call_2.Op != OpConst64 {
			break
		}
		sz := auxIntToInt64(call_2.AuxInt)
		if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(int64(sz))
		v.Aux = typeToAux(types.Types[types.TUINT8])
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (SelectN [0] call:(StaticCall {sym} dst src (Const32 [sz]) mem))
	// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpStaticCall || len(call.Args) != 4 {
			break
		}
		sym := auxToCall(call.Aux)
		mem := call.Args[3]
		dst := call.Args[0]
		src := call.Args[1]
		call_2 := call.Args[2]
		if call_2.Op != OpConst32 {
			break
		}
		sz := auxIntToInt32(call_2.AuxInt)
		if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(int64(sz))
		v.Aux = typeToAux(types.Types[types.TUINT8])
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (SelectN [0] call:(StaticLECall {sym} dst src (Const64 [sz]) mem))
	// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpStaticLECall || len(call.Args) != 4 {
			break
		}
		sym := auxToCall(call.Aux)
		mem := call.Args[3]
		dst := call.Args[0]
		src := call.Args[1]
		call_2 := call.Args[2]
		if call_2.Op != OpConst64 {
			break
		}
		sz := auxIntToInt64(call_2.AuxInt)
		if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(int64(sz))
		v.Aux = typeToAux(types.Types[types.TUINT8])
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (SelectN [0] call:(StaticLECall {sym} dst src (Const32 [sz]) mem))
	// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpStaticLECall || len(call.Args) != 4 {
			break
		}
		sym := auxToCall(call.Aux)
		mem := call.Args[3]
		dst := call.Args[0]
		src := call.Args[1]
		call_2 := call.Args[2]
		if call_2.Op != OpConst32 {
			break
		}
		sz := auxIntToInt32(call_2.AuxInt)
		if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(int64(sz))
		v.Aux = typeToAux(types.Types[types.TUINT8])
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (SelectN [0] call:(StaticLECall {sym} a x))
	// cond: needRaceCleanup(sym, call) && clobber(call)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpStaticLECall || len(call.Args) != 2 {
			break
		}
		sym := auxToCall(call.Aux)
		x := call.Args[1]
		if !(needRaceCleanup(sym, call) && clobber(call)) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (SelectN [0] call:(StaticLECall {sym} x))
	// cond: needRaceCleanup(sym, call) && clobber(call)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpStaticLECall || len(call.Args) != 1 {
			break
		}
		sym := auxToCall(call.Aux)
		x := call.Args[0]
		if !(needRaceCleanup(sym, call) && clobber(call)) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (SelectN [1] (StaticCall {sym} _ newLen:(Const64) _ _ _ _))
	// cond: v.Type.IsInteger() && isSameCall(sym, "runtime.growslice")
	// result: newLen
	for {
		if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStaticCall || len(v_0.Args) != 6 {
			break
		}
		sym := auxToCall(v_0.Aux)
		_ = v_0.Args[1]
		newLen := v_0.Args[1]
		if newLen.Op != OpConst64 || !(v.Type.IsInteger() && isSameCall(sym, "runtime.growslice")) {
			break
		}
		v.copyOf(newLen)
		return true
	}
	// match: (SelectN [1] (StaticCall {sym} _ newLen:(Const32) _ _ _ _))
	// cond: v.Type.IsInteger() && isSameCall(sym, "runtime.growslice")
	// result: newLen
	for {
		if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStaticCall || len(v_0.Args) != 6 {
			break
		}
		sym := auxToCall(v_0.Aux)
		_ = v_0.Args[1]
		newLen := v_0.Args[1]
		if newLen.Op != OpConst32 || !(v.Type.IsInteger() && isSameCall(sym, "runtime.growslice")) {
			break
		}
		v.copyOf(newLen)
		return true
	}
	// match: (SelectN [0] (StaticLECall {f} x y (SelectN [1] c:(StaticLECall {g} x y mem))))
	// cond: isSameCall(f, "runtime.cmpstring") && isSameCall(g, "runtime.cmpstring")
	// result: @c.Block (SelectN [0] <typ.Int> c)
	for {
		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpStaticLECall || len(v_0.Args) != 3 {
			break
		}
		f := auxToCall(v_0.Aux)
		_ = v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v_0_2 := v_0.Args[2]
		if v_0_2.Op != OpSelectN || auxIntToInt64(v_0_2.AuxInt) != 1 {
			break
		}
		c := v_0_2.Args[0]
		if c.Op != OpStaticLECall || len(c.Args) != 3 {
			break
		}
		g := auxToCall(c.Aux)
		if x != c.Args[0] || y != c.Args[1] || !(isSameCall(f, "runtime.cmpstring") && isSameCall(g, "runtime.cmpstring")) {
			break
		}
		b = c.Block
		v0 := b.NewValue0(v.Pos, OpSelectN, typ.Int)
		v.copyOf(v0)
		v0.AuxInt = int64ToAuxInt(0)
		v0.AddArg(c)
		return true
	}
	// match: (SelectN [1] c:(StaticLECall {f} _ _ mem))
	// cond: c.Uses == 1 && isSameCall(f, "runtime.cmpstring") && clobber(c)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		c := v_0
		if c.Op != OpStaticLECall || len(c.Args) != 3 {
			break
		}
		f := auxToCall(c.Aux)
		mem := c.Args[2]
		if !(c.Uses == 1 && isSameCall(f, "runtime.cmpstring") && clobber(c)) {
			break
		}
		v.copyOf(mem)
		return true
	}
	return false
}
// rewriteValuegeneric_OpSignExt16to32 applies generic rewrite rules to an
// OpSignExt16to32 value: it constant-folds the extension and removes a
// redundant truncate/extend pair when the value was produced by an arithmetic
// right shift of at least 16 bits (so the upper bits already hold the sign).
// It reports whether v was rewritten. Generated from _gen/generic.rules.
func rewriteValuegeneric_OpSignExt16to32(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SignExt16to32 (Const16 [c]))
	// result: (Const32 [int32(c)])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(c))
		return true
	}
	// match: (SignExt16to32 (Trunc32to16 x:(Rsh32x64 _ (Const64 [s]))))
	// cond: s >= 16
	// result: x
	for {
		if v_0.Op != OpTrunc32to16 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh32x64 {
			break
		}
		_ = x.Args[1]
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := auxIntToInt64(x_1.AuxInt)
		if !(s >= 16) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpSignExt16to64 applies generic rewrite rules to an
// OpSignExt16to64 value: it constant-folds the extension and removes a
// redundant truncate/extend pair when the value was produced by an arithmetic
// right shift of at least 48 bits (so the upper bits already hold the sign).
// It reports whether v was rewritten. Generated from _gen/generic.rules.
func rewriteValuegeneric_OpSignExt16to64(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SignExt16to64 (Const16 [c]))
	// result: (Const64 [int64(c)])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(c))
		return true
	}
	// match: (SignExt16to64 (Trunc64to16 x:(Rsh64x64 _ (Const64 [s]))))
	// cond: s >= 48
	// result: x
	for {
		if v_0.Op != OpTrunc64to16 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh64x64 {
			break
		}
		_ = x.Args[1]
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := auxIntToInt64(x_1.AuxInt)
		if !(s >= 48) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpSignExt32to64 applies generic rewrite rules to an
// OpSignExt32to64 value: it constant-folds the extension and removes a
// redundant truncate/extend pair when the value was produced by an arithmetic
// right shift of at least 32 bits (so the upper bits already hold the sign).
// It reports whether v was rewritten. Generated from _gen/generic.rules.
func rewriteValuegeneric_OpSignExt32to64(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SignExt32to64 (Const32 [c]))
	// result: (Const64 [int64(c)])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(c))
		return true
	}
	// match: (SignExt32to64 (Trunc64to32 x:(Rsh64x64 _ (Const64 [s]))))
	// cond: s >= 32
	// result: x
	for {
		if v_0.Op != OpTrunc64to32 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh64x64 {
			break
		}
		_ = x.Args[1]
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := auxIntToInt64(x_1.AuxInt)
		if !(s >= 32) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpSignExt8to16 applies generic rewrite rules to an
// OpSignExt8to16 value: it constant-folds the extension and removes a
// redundant truncate/extend pair when the value was produced by an arithmetic
// right shift of at least 8 bits (so the upper bits already hold the sign).
// It reports whether v was rewritten. Generated from _gen/generic.rules.
func rewriteValuegeneric_OpSignExt8to16(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SignExt8to16 (Const8 [c]))
	// result: (Const16 [int16(c)])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(int16(c))
		return true
	}
	// match: (SignExt8to16 (Trunc16to8 x:(Rsh16x64 _ (Const64 [s]))))
	// cond: s >= 8
	// result: x
	for {
		if v_0.Op != OpTrunc16to8 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh16x64 {
			break
		}
		_ = x.Args[1]
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := auxIntToInt64(x_1.AuxInt)
		if !(s >= 8) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpSignExt8to32 applies generic rewrite rules to an
// OpSignExt8to32 value: it constant-folds the extension and removes a
// redundant truncate/extend pair when the value was produced by an arithmetic
// right shift of at least 24 bits (so the upper bits already hold the sign).
// It reports whether v was rewritten. Generated from _gen/generic.rules.
func rewriteValuegeneric_OpSignExt8to32(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SignExt8to32 (Const8 [c]))
	// result: (Const32 [int32(c)])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(c))
		return true
	}
	// match: (SignExt8to32 (Trunc32to8 x:(Rsh32x64 _ (Const64 [s]))))
	// cond: s >= 24
	// result: x
	for {
		if v_0.Op != OpTrunc32to8 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh32x64 {
			break
		}
		_ = x.Args[1]
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := auxIntToInt64(x_1.AuxInt)
		if !(s >= 24) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpSignExt8to64 applies generic rewrite rules to an
// OpSignExt8to64 value: it constant-folds the extension and removes a
// redundant truncate/extend pair when the value was produced by an arithmetic
// right shift of at least 56 bits (so the upper bits already hold the sign).
// It reports whether v was rewritten. Generated from _gen/generic.rules.
func rewriteValuegeneric_OpSignExt8to64(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SignExt8to64 (Const8 [c]))
	// result: (Const64 [int64(c)])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(c))
		return true
	}
	// match: (SignExt8to64 (Trunc64to8 x:(Rsh64x64 _ (Const64 [s]))))
	// cond: s >= 56
	// result: x
	for {
		if v_0.Op != OpTrunc64to8 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh64x64 {
			break
		}
		_ = x.Args[1]
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := auxIntToInt64(x_1.AuxInt)
		if !(s >= 56) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpSliceCap applies generic rewrite rules to an
// OpSliceCap value, extracting the capacity directly from a SliceMake whose
// third argument is a constant or an existing SliceCap/SliceLen value. It
// reports whether v was rewritten. Generated from _gen/generic.rules.
func rewriteValuegeneric_OpSliceCap(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SliceCap (SliceMake _ _ (Const64 <t> [c])))
	// result: (Const64 <t> [c])
	for {
		if v_0.Op != OpSliceMake {
			break
		}
		_ = v_0.Args[2]
		v_0_2 := v_0.Args[2]
		if v_0_2.Op != OpConst64 {
			break
		}
		t := v_0_2.Type
		c := auxIntToInt64(v_0_2.AuxInt)
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	// match: (SliceCap (SliceMake _ _ (Const32 <t> [c])))
	// result: (Const32 <t> [c])
	for {
		if v_0.Op != OpSliceMake {
			break
		}
		_ = v_0.Args[2]
		v_0_2 := v_0.Args[2]
		if v_0_2.Op != OpConst32 {
			break
		}
		t := v_0_2.Type
		c := auxIntToInt32(v_0_2.AuxInt)
		v.reset(OpConst32)
		v.Type = t
		v.AuxInt = int32ToAuxInt(c)
		return true
	}
	// match: (SliceCap (SliceMake _ _ (SliceCap x)))
	// result: (SliceCap x)
	for {
		if v_0.Op != OpSliceMake {
			break
		}
		_ = v_0.Args[2]
		v_0_2 := v_0.Args[2]
		if v_0_2.Op != OpSliceCap {
			break
		}
		x := v_0_2.Args[0]
		v.reset(OpSliceCap)
		v.AddArg(x)
		return true
	}
	// match: (SliceCap (SliceMake _ _ (SliceLen x)))
	// result: (SliceLen x)
	for {
		if v_0.Op != OpSliceMake {
			break
		}
		_ = v_0.Args[2]
		v_0_2 := v_0.Args[2]
		if v_0_2.Op != OpSliceLen {
			break
		}
		x := v_0_2.Args[0]
		v.reset(OpSliceLen)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpSliceLen applies generic rewrite rules to an
// OpSliceLen value: it extracts the length directly from a SliceMake whose
// second argument is a constant or an existing SliceLen, and recovers the new
// length from the result of a runtime.growslice call. It reports whether v
// was rewritten. Generated from _gen/generic.rules.
func rewriteValuegeneric_OpSliceLen(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SliceLen (SliceMake _ (Const64 <t> [c]) _))
	// result: (Const64 <t> [c])
	for {
		if v_0.Op != OpSliceMake {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		t := v_0_1.Type
		c := auxIntToInt64(v_0_1.AuxInt)
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	// match: (SliceLen (SliceMake _ (Const32 <t> [c]) _))
	// result: (Const32 <t> [c])
	for {
		if v_0.Op != OpSliceMake {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst32 {
			break
		}
		t := v_0_1.Type
		c := auxIntToInt32(v_0_1.AuxInt)
		v.reset(OpConst32)
		v.Type = t
		v.AuxInt = int32ToAuxInt(c)
		return true
	}
	// match: (SliceLen (SliceMake _ (SliceLen x) _))
	// result: (SliceLen x)
	for {
		if v_0.Op != OpSliceMake {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpSliceLen {
			break
		}
		x := v_0_1.Args[0]
		v.reset(OpSliceLen)
		v.AddArg(x)
		return true
	}
	// match: (SliceLen (SelectN [0] (StaticLECall {sym} _ newLen:(Const64) _ _ _ _)))
	// cond: isSameCall(sym, "runtime.growslice")
	// result: newLen
	for {
		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpStaticLECall || len(v_0_0.Args) != 6 {
			break
		}
		sym := auxToCall(v_0_0.Aux)
		_ = v_0_0.Args[1]
		newLen := v_0_0.Args[1]
		if newLen.Op != OpConst64 || !(isSameCall(sym, "runtime.growslice")) {
			break
		}
		v.copyOf(newLen)
		return true
	}
	// match: (SliceLen (SelectN [0] (StaticLECall {sym} _ newLen:(Const32) _ _ _ _)))
	// cond: isSameCall(sym, "runtime.growslice")
	// result: newLen
	for {
		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpStaticLECall || len(v_0_0.Args) != 6 {
			break
		}
		sym := auxToCall(v_0_0.Aux)
		_ = v_0_0.Args[1]
		newLen := v_0_0.Args[1]
		if newLen.Op != OpConst32 || !(isSameCall(sym, "runtime.growslice")) {
			break
		}
		v.copyOf(newLen)
		return true
	}
	return false
}
// rewriteValuegeneric_OpSlicePtr applies generic rewrite rules to an
// OpSlicePtr value, collapsing SlicePtr of a SliceMake whose pointer is
// itself a SlicePtr. It reports whether v was rewritten. Generated from
// _gen/generic.rules.
func rewriteValuegeneric_OpSlicePtr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SlicePtr (SliceMake (SlicePtr x) _ _))
	// result: (SlicePtr x)
	for {
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpSlicePtr {
			break
		}
		x := v_0_0.Args[0]
		v.reset(OpSlicePtr)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpSlicemask constant-folds Slicemask of a
// constant operand, reporting whether a rewrite was applied: a
// positive constant (32- or 64-bit) folds to an all-ones mask (-1),
// and a zero constant folds to 0. No rule fires for non-constant or
// negative operands, so those are left for the backend to lower.
func rewriteValuegeneric_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Slicemask (Const32 [x]))
	// cond: x > 0
	// result: (Const32 [-1])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		x := auxIntToInt32(v_0.AuxInt)
		if !(x > 0) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(-1)
		return true
	}
	// match: (Slicemask (Const32 [0]))
	// result: (Const32 [0])
	for {
		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(0)
		return true
	}
	// match: (Slicemask (Const64 [x]))
	// cond: x > 0
	// result: (Const64 [-1])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		if !(x > 0) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (Slicemask (Const64 [0]))
	// result: (Const64 [0])
	for {
		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpSqrt constant-folds Sqrt of a Const64F at
// compile time, reporting whether a rewrite was applied. The fold is
// guarded by !math.IsNaN(math.Sqrt(c)): when the result would be NaN
// (math.Sqrt returns NaN for negative inputs), the operation is left
// in place so the runtime produces the NaN value itself.
func rewriteValuegeneric_OpSqrt(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Sqrt (Const64F [c]))
	// cond: !math.IsNaN(math.Sqrt(c))
	// result: (Const64F [math.Sqrt(c)])
	for {
		if v_0.Op != OpConst64F {
			break
		}
		c := auxIntToFloat64(v_0.AuxInt)
		if !(!math.IsNaN(math.Sqrt(c))) {
			break
		}
		v.reset(OpConst64F)
		v.AuxInt = float64ToAuxInt(math.Sqrt(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpStaticCall applies generic rewrite rules to a
// StaticCall value, reporting whether a rewrite was applied. The single
// rule here folds runtime.memequal(p, q, _) with provably identical
// pointers (isSamePtr) to a constant true result, threading the
// incoming memory state through unchanged via MakeResult.
func rewriteValuegeneric_OpStaticCall(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (StaticCall {callAux} p q _ mem)
	// cond: isSameCall(callAux, "runtime.memequal") && isSamePtr(p, q)
	// result: (MakeResult (ConstBool <typ.Bool> [true]) mem)
	for {
		if len(v.Args) != 4 {
			break
		}
		callAux := auxToCall(v.Aux)
		mem := v.Args[3]
		p := v.Args[0]
		q := v.Args[1]
		if !(isSameCall(callAux, "runtime.memequal") && isSamePtr(p, q)) {
			break
		}
		v.reset(OpMakeResult)
		v0 := b.NewValue0(v.Pos, OpConstBool, typ.Bool)
		v0.AuxInt = boolToAuxInt(true)
		v.AddArg2(v0, mem)
		return true
	}
	return false
}
// rewriteValuegeneric_OpStaticLECall applies generic rewrite rules to a
// StaticLECall value, reporting whether a rewrite was applied. The
// rules fold calls to known runtime functions:
//
//   - runtime.memequal of 1, 2, 4, or 8 bytes where one side is the
//     address of a read-only symbol (Addr {scon} (SB), symIsRO) becomes
//     a direct Eq8/Eq16/Eq32/Eq64 of a Load against a constant whose
//     value is read from the symbol at compile time (read8/read16/
//     read32/read64 using the target byte order). The multi-byte forms
//     additionally require canLoadUnaligned(config), and the 8-byte
//     form requires config.PtrSize == 8. Each size has two mirrored
//     rules so the read-only symbol may appear as either argument.
//   - runtime.memequal with length 0, or with the same pointer for
//     both arguments (isSamePtr), folds to constant true.
//   - runtime.makeslice with constant-zero length and capacity
//     (Const64 or Const32 forms) folds to the address of the runtime
//     zero base symbol (ir.Syms.Zerobase) — no allocation is needed.
//
// In every rule the incoming memory argument is passed through
// unchanged as the second result of MakeResult.
func rewriteValuegeneric_OpStaticLECall(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [1]) mem)
	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon)
	// result: (MakeResult (Eq8 (Load <typ.Int8> sptr mem) (Const8 <typ.Int8> [int8(read8(scon,0))])) mem)
	for {
		if len(v.Args) != 4 {
			break
		}
		callAux := auxToCall(v.Aux)
		mem := v.Args[3]
		sptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAddr {
			break
		}
		scon := auxToSym(v_1.Aux)
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpSB {
			break
		}
		v_2 := v.Args[2]
		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 1 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon)) {
			break
		}
		v.reset(OpMakeResult)
		v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int8)
		v1.AddArg2(sptr, mem)
		v2 := b.NewValue0(v.Pos, OpConst8, typ.Int8)
		v2.AuxInt = int8ToAuxInt(int8(read8(scon, 0)))
		v0.AddArg2(v1, v2)
		v.AddArg2(v0, mem)
		return true
	}
	// match: (StaticLECall {callAux} (Addr {scon} (SB)) sptr (Const64 [1]) mem)
	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon)
	// result: (MakeResult (Eq8 (Load <typ.Int8> sptr mem) (Const8 <typ.Int8> [int8(read8(scon,0))])) mem)
	for {
		if len(v.Args) != 4 {
			break
		}
		callAux := auxToCall(v.Aux)
		mem := v.Args[3]
		v_0 := v.Args[0]
		if v_0.Op != OpAddr {
			break
		}
		scon := auxToSym(v_0.Aux)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpSB {
			break
		}
		sptr := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 1 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon)) {
			break
		}
		v.reset(OpMakeResult)
		v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int8)
		v1.AddArg2(sptr, mem)
		v2 := b.NewValue0(v.Pos, OpConst8, typ.Int8)
		v2.AuxInt = int8ToAuxInt(int8(read8(scon, 0)))
		v0.AddArg2(v1, v2)
		v.AddArg2(v0, mem)
		return true
	}
	// match: (StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [2]) mem)
	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)
	// result: (MakeResult (Eq16 (Load <typ.Int16> sptr mem) (Const16 <typ.Int16> [int16(read16(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
	for {
		if len(v.Args) != 4 {
			break
		}
		callAux := auxToCall(v.Aux)
		mem := v.Args[3]
		sptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAddr {
			break
		}
		scon := auxToSym(v_1.Aux)
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpSB {
			break
		}
		v_2 := v.Args[2]
		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 2 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)) {
			break
		}
		v.reset(OpMakeResult)
		v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int16)
		v1.AddArg2(sptr, mem)
		v2 := b.NewValue0(v.Pos, OpConst16, typ.Int16)
		v2.AuxInt = int16ToAuxInt(int16(read16(scon, 0, config.ctxt.Arch.ByteOrder)))
		v0.AddArg2(v1, v2)
		v.AddArg2(v0, mem)
		return true
	}
	// match: (StaticLECall {callAux} (Addr {scon} (SB)) sptr (Const64 [2]) mem)
	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)
	// result: (MakeResult (Eq16 (Load <typ.Int16> sptr mem) (Const16 <typ.Int16> [int16(read16(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
	for {
		if len(v.Args) != 4 {
			break
		}
		callAux := auxToCall(v.Aux)
		mem := v.Args[3]
		v_0 := v.Args[0]
		if v_0.Op != OpAddr {
			break
		}
		scon := auxToSym(v_0.Aux)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpSB {
			break
		}
		sptr := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 2 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)) {
			break
		}
		v.reset(OpMakeResult)
		v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int16)
		v1.AddArg2(sptr, mem)
		v2 := b.NewValue0(v.Pos, OpConst16, typ.Int16)
		v2.AuxInt = int16ToAuxInt(int16(read16(scon, 0, config.ctxt.Arch.ByteOrder)))
		v0.AddArg2(v1, v2)
		v.AddArg2(v0, mem)
		return true
	}
	// match: (StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [4]) mem)
	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)
	// result: (MakeResult (Eq32 (Load <typ.Int32> sptr mem) (Const32 <typ.Int32> [int32(read32(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
	for {
		if len(v.Args) != 4 {
			break
		}
		callAux := auxToCall(v.Aux)
		mem := v.Args[3]
		sptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAddr {
			break
		}
		scon := auxToSym(v_1.Aux)
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpSB {
			break
		}
		v_2 := v.Args[2]
		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 4 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)) {
			break
		}
		v.reset(OpMakeResult)
		v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int32)
		v1.AddArg2(sptr, mem)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
		v2.AuxInt = int32ToAuxInt(int32(read32(scon, 0, config.ctxt.Arch.ByteOrder)))
		v0.AddArg2(v1, v2)
		v.AddArg2(v0, mem)
		return true
	}
	// match: (StaticLECall {callAux} (Addr {scon} (SB)) sptr (Const64 [4]) mem)
	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)
	// result: (MakeResult (Eq32 (Load <typ.Int32> sptr mem) (Const32 <typ.Int32> [int32(read32(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
	for {
		if len(v.Args) != 4 {
			break
		}
		callAux := auxToCall(v.Aux)
		mem := v.Args[3]
		v_0 := v.Args[0]
		if v_0.Op != OpAddr {
			break
		}
		scon := auxToSym(v_0.Aux)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpSB {
			break
		}
		sptr := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 4 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)) {
			break
		}
		v.reset(OpMakeResult)
		v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int32)
		v1.AddArg2(sptr, mem)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
		v2.AuxInt = int32ToAuxInt(int32(read32(scon, 0, config.ctxt.Arch.ByteOrder)))
		v0.AddArg2(v1, v2)
		v.AddArg2(v0, mem)
		return true
	}
	// match: (StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [8]) mem)
	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config) && config.PtrSize == 8
	// result: (MakeResult (Eq64 (Load <typ.Int64> sptr mem) (Const64 <typ.Int64> [int64(read64(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
	for {
		if len(v.Args) != 4 {
			break
		}
		callAux := auxToCall(v.Aux)
		mem := v.Args[3]
		sptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAddr {
			break
		}
		scon := auxToSym(v_1.Aux)
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpSB {
			break
		}
		v_2 := v.Args[2]
		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 8 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config) && config.PtrSize == 8) {
			break
		}
		v.reset(OpMakeResult)
		v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int64)
		v1.AddArg2(sptr, mem)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.Int64)
		v2.AuxInt = int64ToAuxInt(int64(read64(scon, 0, config.ctxt.Arch.ByteOrder)))
		v0.AddArg2(v1, v2)
		v.AddArg2(v0, mem)
		return true
	}
	// match: (StaticLECall {callAux} (Addr {scon} (SB)) sptr (Const64 [8]) mem)
	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config) && config.PtrSize == 8
	// result: (MakeResult (Eq64 (Load <typ.Int64> sptr mem) (Const64 <typ.Int64> [int64(read64(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
	for {
		if len(v.Args) != 4 {
			break
		}
		callAux := auxToCall(v.Aux)
		mem := v.Args[3]
		v_0 := v.Args[0]
		if v_0.Op != OpAddr {
			break
		}
		scon := auxToSym(v_0.Aux)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpSB {
			break
		}
		sptr := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 8 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config) && config.PtrSize == 8) {
			break
		}
		v.reset(OpMakeResult)
		v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int64)
		v1.AddArg2(sptr, mem)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.Int64)
		v2.AuxInt = int64ToAuxInt(int64(read64(scon, 0, config.ctxt.Arch.ByteOrder)))
		v0.AddArg2(v1, v2)
		v.AddArg2(v0, mem)
		return true
	}
	// match: (StaticLECall {callAux} _ _ (Const64 [0]) mem)
	// cond: isSameCall(callAux, "runtime.memequal")
	// result: (MakeResult (ConstBool <typ.Bool> [true]) mem)
	for {
		if len(v.Args) != 4 {
			break
		}
		callAux := auxToCall(v.Aux)
		mem := v.Args[3]
		v_2 := v.Args[2]
		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 0 || !(isSameCall(callAux, "runtime.memequal")) {
			break
		}
		v.reset(OpMakeResult)
		v0 := b.NewValue0(v.Pos, OpConstBool, typ.Bool)
		v0.AuxInt = boolToAuxInt(true)
		v.AddArg2(v0, mem)
		return true
	}
	// match: (StaticLECall {callAux} p q _ mem)
	// cond: isSameCall(callAux, "runtime.memequal") && isSamePtr(p, q)
	// result: (MakeResult (ConstBool <typ.Bool> [true]) mem)
	for {
		if len(v.Args) != 4 {
			break
		}
		callAux := auxToCall(v.Aux)
		mem := v.Args[3]
		p := v.Args[0]
		q := v.Args[1]
		if !(isSameCall(callAux, "runtime.memequal") && isSamePtr(p, q)) {
			break
		}
		v.reset(OpMakeResult)
		v0 := b.NewValue0(v.Pos, OpConstBool, typ.Bool)
		v0.AuxInt = boolToAuxInt(true)
		v.AddArg2(v0, mem)
		return true
	}
	// match: (StaticLECall {callAux} _ (Const64 [0]) (Const64 [0]) mem)
	// cond: isSameCall(callAux, "runtime.makeslice")
	// result: (MakeResult (Addr <v.Type.FieldType(0)> {ir.Syms.Zerobase} (SB)) mem)
	for {
		if len(v.Args) != 4 {
			break
		}
		callAux := auxToCall(v.Aux)
		mem := v.Args[3]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v_2 := v.Args[2]
		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 0 || !(isSameCall(callAux, "runtime.makeslice")) {
			break
		}
		v.reset(OpMakeResult)
		v0 := b.NewValue0(v.Pos, OpAddr, v.Type.FieldType(0))
		v0.Aux = symToAux(ir.Syms.Zerobase)
		v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
		v0.AddArg(v1)
		v.AddArg2(v0, mem)
		return true
	}
	// match: (StaticLECall {callAux} _ (Const32 [0]) (Const32 [0]) mem)
	// cond: isSameCall(callAux, "runtime.makeslice")
	// result: (MakeResult (Addr <v.Type.FieldType(0)> {ir.Syms.Zerobase} (SB)) mem)
	for {
		if len(v.Args) != 4 {
			break
		}
		callAux := auxToCall(v.Aux)
		mem := v.Args[3]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != 0 {
			break
		}
		v_2 := v.Args[2]
		if v_2.Op != OpConst32 || auxIntToInt32(v_2.AuxInt) != 0 || !(isSameCall(callAux, "runtime.makeslice")) {
			break
		}
		v.reset(OpMakeResult)
		v0 := b.NewValue0(v.Pos, OpAddr, v.Type.FieldType(0))
		v0.Aux = symToAux(ir.Syms.Zerobase)
		v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
		v0.AddArg(v1)
		v.AddArg2(v0, mem)
		return true
	}
	return false
}
29366func rewriteValuegeneric_OpStore(v *Value) bool {
29367	v_2 := v.Args[2]
29368	v_1 := v.Args[1]
29369	v_0 := v.Args[0]
29370	b := v.Block
29371	// match: (Store {t1} p1 (Load <t2> p2 mem) mem)
29372	// cond: isSamePtr(p1, p2) && t2.Size() == t1.Size()
29373	// result: mem
29374	for {
29375		t1 := auxToType(v.Aux)
29376		p1 := v_0
29377		if v_1.Op != OpLoad {
29378			break
29379		}
29380		t2 := v_1.Type
29381		mem := v_1.Args[1]
29382		p2 := v_1.Args[0]
29383		if mem != v_2 || !(isSamePtr(p1, p2) && t2.Size() == t1.Size()) {
29384			break
29385		}
29386		v.copyOf(mem)
29387		return true
29388	}
29389	// match: (Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ oldmem))
29390	// cond: isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size())
29391	// result: mem
29392	for {
29393		t1 := auxToType(v.Aux)
29394		p1 := v_0
29395		if v_1.Op != OpLoad {
29396			break
29397		}
29398		t2 := v_1.Type
29399		oldmem := v_1.Args[1]
29400		p2 := v_1.Args[0]
29401		mem := v_2
29402		if mem.Op != OpStore {
29403			break
29404		}
29405		t3 := auxToType(mem.Aux)
29406		_ = mem.Args[2]
29407		p3 := mem.Args[0]
29408		if oldmem != mem.Args[2] || !(isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size())) {
29409			break
29410		}
29411		v.copyOf(mem)
29412		return true
29413	}
29414	// match: (Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ (Store {t4} p4 _ oldmem)))
29415	// cond: isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size()) && disjoint(p1, t1.Size(), p4, t4.Size())
29416	// result: mem
29417	for {
29418		t1 := auxToType(v.Aux)
29419		p1 := v_0
29420		if v_1.Op != OpLoad {
29421			break
29422		}
29423		t2 := v_1.Type
29424		oldmem := v_1.Args[1]
29425		p2 := v_1.Args[0]
29426		mem := v_2
29427		if mem.Op != OpStore {
29428			break
29429		}
29430		t3 := auxToType(mem.Aux)
29431		_ = mem.Args[2]
29432		p3 := mem.Args[0]
29433		mem_2 := mem.Args[2]
29434		if mem_2.Op != OpStore {
29435			break
29436		}
29437		t4 := auxToType(mem_2.Aux)
29438		_ = mem_2.Args[2]
29439		p4 := mem_2.Args[0]
29440		if oldmem != mem_2.Args[2] || !(isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size()) && disjoint(p1, t1.Size(), p4, t4.Size())) {
29441			break
29442		}
29443		v.copyOf(mem)
29444		return true
29445	}
29446	// match: (Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 _ oldmem))))
29447	// cond: isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size()) && disjoint(p1, t1.Size(), p4, t4.Size()) && disjoint(p1, t1.Size(), p5, t5.Size())
29448	// result: mem
29449	for {
29450		t1 := auxToType(v.Aux)
29451		p1 := v_0
29452		if v_1.Op != OpLoad {
29453			break
29454		}
29455		t2 := v_1.Type
29456		oldmem := v_1.Args[1]
29457		p2 := v_1.Args[0]
29458		mem := v_2
29459		if mem.Op != OpStore {
29460			break
29461		}
29462		t3 := auxToType(mem.Aux)
29463		_ = mem.Args[2]
29464		p3 := mem.Args[0]
29465		mem_2 := mem.Args[2]
29466		if mem_2.Op != OpStore {
29467			break
29468		}
29469		t4 := auxToType(mem_2.Aux)
29470		_ = mem_2.Args[2]
29471		p4 := mem_2.Args[0]
29472		mem_2_2 := mem_2.Args[2]
29473		if mem_2_2.Op != OpStore {
29474			break
29475		}
29476		t5 := auxToType(mem_2_2.Aux)
29477		_ = mem_2_2.Args[2]
29478		p5 := mem_2_2.Args[0]
29479		if oldmem != mem_2_2.Args[2] || !(isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size()) && disjoint(p1, t1.Size(), p4, t4.Size()) && disjoint(p1, t1.Size(), p5, t5.Size())) {
29480			break
29481		}
29482		v.copyOf(mem)
29483		return true
29484	}
29485	// match: (Store {t} (OffPtr [o] p1) x mem:(Zero [n] p2 _))
29486	// cond: isConstZero(x) && o >= 0 && t.Size() + o <= n && isSamePtr(p1, p2)
29487	// result: mem
29488	for {
29489		t := auxToType(v.Aux)
29490		if v_0.Op != OpOffPtr {
29491			break
29492		}
29493		o := auxIntToInt64(v_0.AuxInt)
29494		p1 := v_0.Args[0]
29495		x := v_1
29496		mem := v_2
29497		if mem.Op != OpZero {
29498			break
29499		}
29500		n := auxIntToInt64(mem.AuxInt)
29501		p2 := mem.Args[0]
29502		if !(isConstZero(x) && o >= 0 && t.Size()+o <= n && isSamePtr(p1, p2)) {
29503			break
29504		}
29505		v.copyOf(mem)
29506		return true
29507	}
29508	// match: (Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Zero [n] p3 _)))
29509	// cond: isConstZero(x) && o1 >= 0 && t1.Size() + o1 <= n && isSamePtr(p1, p3) && disjoint(op, t1.Size(), p2, t2.Size())
29510	// result: mem
29511	for {
29512		t1 := auxToType(v.Aux)
29513		op := v_0
29514		if op.Op != OpOffPtr {
29515			break
29516		}
29517		o1 := auxIntToInt64(op.AuxInt)
29518		p1 := op.Args[0]
29519		x := v_1
29520		mem := v_2
29521		if mem.Op != OpStore {
29522			break
29523		}
29524		t2 := auxToType(mem.Aux)
29525		_ = mem.Args[2]
29526		p2 := mem.Args[0]
29527		mem_2 := mem.Args[2]
29528		if mem_2.Op != OpZero {
29529			break
29530		}
29531		n := auxIntToInt64(mem_2.AuxInt)
29532		p3 := mem_2.Args[0]
29533		if !(isConstZero(x) && o1 >= 0 && t1.Size()+o1 <= n && isSamePtr(p1, p3) && disjoint(op, t1.Size(), p2, t2.Size())) {
29534			break
29535		}
29536		v.copyOf(mem)
29537		return true
29538	}
29539	// match: (Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Store {t3} p3 _ (Zero [n] p4 _))))
29540	// cond: isConstZero(x) && o1 >= 0 && t1.Size() + o1 <= n && isSamePtr(p1, p4) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size())
29541	// result: mem
29542	for {
29543		t1 := auxToType(v.Aux)
29544		op := v_0
29545		if op.Op != OpOffPtr {
29546			break
29547		}
29548		o1 := auxIntToInt64(op.AuxInt)
29549		p1 := op.Args[0]
29550		x := v_1
29551		mem := v_2
29552		if mem.Op != OpStore {
29553			break
29554		}
29555		t2 := auxToType(mem.Aux)
29556		_ = mem.Args[2]
29557		p2 := mem.Args[0]
29558		mem_2 := mem.Args[2]
29559		if mem_2.Op != OpStore {
29560			break
29561		}
29562		t3 := auxToType(mem_2.Aux)
29563		_ = mem_2.Args[2]
29564		p3 := mem_2.Args[0]
29565		mem_2_2 := mem_2.Args[2]
29566		if mem_2_2.Op != OpZero {
29567			break
29568		}
29569		n := auxIntToInt64(mem_2_2.AuxInt)
29570		p4 := mem_2_2.Args[0]
29571		if !(isConstZero(x) && o1 >= 0 && t1.Size()+o1 <= n && isSamePtr(p1, p4) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size())) {
29572			break
29573		}
29574		v.copyOf(mem)
29575		return true
29576	}
29577	// match: (Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Zero [n] p5 _)))))
29578	// cond: isConstZero(x) && o1 >= 0 && t1.Size() + o1 <= n && isSamePtr(p1, p5) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size())
29579	// result: mem
29580	for {
29581		t1 := auxToType(v.Aux)
29582		op := v_0
29583		if op.Op != OpOffPtr {
29584			break
29585		}
29586		o1 := auxIntToInt64(op.AuxInt)
29587		p1 := op.Args[0]
29588		x := v_1
29589		mem := v_2
29590		if mem.Op != OpStore {
29591			break
29592		}
29593		t2 := auxToType(mem.Aux)
29594		_ = mem.Args[2]
29595		p2 := mem.Args[0]
29596		mem_2 := mem.Args[2]
29597		if mem_2.Op != OpStore {
29598			break
29599		}
29600		t3 := auxToType(mem_2.Aux)
29601		_ = mem_2.Args[2]
29602		p3 := mem_2.Args[0]
29603		mem_2_2 := mem_2.Args[2]
29604		if mem_2_2.Op != OpStore {
29605			break
29606		}
29607		t4 := auxToType(mem_2_2.Aux)
29608		_ = mem_2_2.Args[2]
29609		p4 := mem_2_2.Args[0]
29610		mem_2_2_2 := mem_2_2.Args[2]
29611		if mem_2_2_2.Op != OpZero {
29612			break
29613		}
29614		n := auxIntToInt64(mem_2_2_2.AuxInt)
29615		p5 := mem_2_2_2.Args[0]
29616		if !(isConstZero(x) && o1 >= 0 && t1.Size()+o1 <= n && isSamePtr(p1, p5) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size())) {
29617			break
29618		}
29619		v.copyOf(mem)
29620		return true
29621	}
29622	// match: (Store _ (StructMake0) mem)
29623	// result: mem
29624	for {
29625		if v_1.Op != OpStructMake0 {
29626			break
29627		}
29628		mem := v_2
29629		v.copyOf(mem)
29630		return true
29631	}
29632	// match: (Store dst (StructMake1 <t> f0) mem)
29633	// result: (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem)
29634	for {
29635		dst := v_0
29636		if v_1.Op != OpStructMake1 {
29637			break
29638		}
29639		t := v_1.Type
29640		f0 := v_1.Args[0]
29641		mem := v_2
29642		v.reset(OpStore)
29643		v.Aux = typeToAux(t.FieldType(0))
29644		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
29645		v0.AuxInt = int64ToAuxInt(0)
29646		v0.AddArg(dst)
29647		v.AddArg3(v0, f0, mem)
29648		return true
29649	}
29650	// match: (Store dst (StructMake2 <t> f0 f1) mem)
29651	// result: (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem))
29652	for {
29653		dst := v_0
29654		if v_1.Op != OpStructMake2 {
29655			break
29656		}
29657		t := v_1.Type
29658		f1 := v_1.Args[1]
29659		f0 := v_1.Args[0]
29660		mem := v_2
29661		v.reset(OpStore)
29662		v.Aux = typeToAux(t.FieldType(1))
29663		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
29664		v0.AuxInt = int64ToAuxInt(t.FieldOff(1))
29665		v0.AddArg(dst)
29666		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29667		v1.Aux = typeToAux(t.FieldType(0))
29668		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
29669		v2.AuxInt = int64ToAuxInt(0)
29670		v2.AddArg(dst)
29671		v1.AddArg3(v2, f0, mem)
29672		v.AddArg3(v0, f1, v1)
29673		return true
29674	}
29675	// match: (Store dst (StructMake3 <t> f0 f1 f2) mem)
29676	// result: (Store {t.FieldType(2)} (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem)))
29677	for {
29678		dst := v_0
29679		if v_1.Op != OpStructMake3 {
29680			break
29681		}
29682		t := v_1.Type
29683		f2 := v_1.Args[2]
29684		f0 := v_1.Args[0]
29685		f1 := v_1.Args[1]
29686		mem := v_2
29687		v.reset(OpStore)
29688		v.Aux = typeToAux(t.FieldType(2))
29689		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
29690		v0.AuxInt = int64ToAuxInt(t.FieldOff(2))
29691		v0.AddArg(dst)
29692		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29693		v1.Aux = typeToAux(t.FieldType(1))
29694		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
29695		v2.AuxInt = int64ToAuxInt(t.FieldOff(1))
29696		v2.AddArg(dst)
29697		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29698		v3.Aux = typeToAux(t.FieldType(0))
29699		v4 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
29700		v4.AuxInt = int64ToAuxInt(0)
29701		v4.AddArg(dst)
29702		v3.AddArg3(v4, f0, mem)
29703		v1.AddArg3(v2, f1, v3)
29704		v.AddArg3(v0, f2, v1)
29705		return true
29706	}
29707	// match: (Store dst (StructMake4 <t> f0 f1 f2 f3) mem)
29708	// result: (Store {t.FieldType(3)} (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst) f3 (Store {t.FieldType(2)} (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem))))
29709	for {
29710		dst := v_0
29711		if v_1.Op != OpStructMake4 {
29712			break
29713		}
29714		t := v_1.Type
29715		f3 := v_1.Args[3]
29716		f0 := v_1.Args[0]
29717		f1 := v_1.Args[1]
29718		f2 := v_1.Args[2]
29719		mem := v_2
29720		v.reset(OpStore)
29721		v.Aux = typeToAux(t.FieldType(3))
29722		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(3).PtrTo())
29723		v0.AuxInt = int64ToAuxInt(t.FieldOff(3))
29724		v0.AddArg(dst)
29725		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29726		v1.Aux = typeToAux(t.FieldType(2))
29727		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
29728		v2.AuxInt = int64ToAuxInt(t.FieldOff(2))
29729		v2.AddArg(dst)
29730		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29731		v3.Aux = typeToAux(t.FieldType(1))
29732		v4 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
29733		v4.AuxInt = int64ToAuxInt(t.FieldOff(1))
29734		v4.AddArg(dst)
29735		v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29736		v5.Aux = typeToAux(t.FieldType(0))
29737		v6 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
29738		v6.AuxInt = int64ToAuxInt(0)
29739		v6.AddArg(dst)
29740		v5.AddArg3(v6, f0, mem)
29741		v3.AddArg3(v4, f1, v5)
29742		v1.AddArg3(v2, f2, v3)
29743		v.AddArg3(v0, f3, v1)
29744		return true
29745	}
29746	// match: (Store {t} dst (Load src mem) mem)
29747	// cond: !CanSSA(t)
29748	// result: (Move {t} [t.Size()] dst src mem)
29749	for {
29750		t := auxToType(v.Aux)
29751		dst := v_0
29752		if v_1.Op != OpLoad {
29753			break
29754		}
29755		mem := v_1.Args[1]
29756		src := v_1.Args[0]
29757		if mem != v_2 || !(!CanSSA(t)) {
29758			break
29759		}
29760		v.reset(OpMove)
29761		v.AuxInt = int64ToAuxInt(t.Size())
29762		v.Aux = typeToAux(t)
29763		v.AddArg3(dst, src, mem)
29764		return true
29765	}
29766	// match: (Store {t} dst (Load src mem) (VarDef {x} mem))
29767	// cond: !CanSSA(t)
29768	// result: (Move {t} [t.Size()] dst src (VarDef {x} mem))
29769	for {
29770		t := auxToType(v.Aux)
29771		dst := v_0
29772		if v_1.Op != OpLoad {
29773			break
29774		}
29775		mem := v_1.Args[1]
29776		src := v_1.Args[0]
29777		if v_2.Op != OpVarDef {
29778			break
29779		}
29780		x := auxToSym(v_2.Aux)
29781		if mem != v_2.Args[0] || !(!CanSSA(t)) {
29782			break
29783		}
29784		v.reset(OpMove)
29785		v.AuxInt = int64ToAuxInt(t.Size())
29786		v.Aux = typeToAux(t)
29787		v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
29788		v0.Aux = symToAux(x)
29789		v0.AddArg(mem)
29790		v.AddArg3(dst, src, v0)
29791		return true
29792	}
29793	// match: (Store _ (ArrayMake0) mem)
29794	// result: mem
29795	for {
29796		if v_1.Op != OpArrayMake0 {
29797			break
29798		}
29799		mem := v_2
29800		v.copyOf(mem)
29801		return true
29802	}
29803	// match: (Store dst (ArrayMake1 e) mem)
29804	// result: (Store {e.Type} dst e mem)
29805	for {
29806		dst := v_0
29807		if v_1.Op != OpArrayMake1 {
29808			break
29809		}
29810		e := v_1.Args[0]
29811		mem := v_2
29812		v.reset(OpStore)
29813		v.Aux = typeToAux(e.Type)
29814		v.AddArg3(dst, e, mem)
29815		return true
29816	}
29817	// match: (Store (SelectN [0] call:(StaticLECall _ _)) x mem:(SelectN [1] call))
29818	// cond: isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")
29819	// result: mem
29820	for {
29821		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
29822			break
29823		}
29824		call := v_0.Args[0]
29825		if call.Op != OpStaticLECall || len(call.Args) != 2 {
29826			break
29827		}
29828		x := v_1
29829		mem := v_2
29830		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")) {
29831			break
29832		}
29833		v.copyOf(mem)
29834		return true
29835	}
29836	// match: (Store (OffPtr (SelectN [0] call:(StaticLECall _ _))) x mem:(SelectN [1] call))
29837	// cond: isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")
29838	// result: mem
29839	for {
29840		if v_0.Op != OpOffPtr {
29841			break
29842		}
29843		v_0_0 := v_0.Args[0]
29844		if v_0_0.Op != OpSelectN || auxIntToInt64(v_0_0.AuxInt) != 0 {
29845			break
29846		}
29847		call := v_0_0.Args[0]
29848		if call.Op != OpStaticLECall || len(call.Args) != 2 {
29849			break
29850		}
29851		x := v_1
29852		mem := v_2
29853		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")) {
29854			break
29855		}
29856		v.copyOf(mem)
29857		return true
29858	}
29859	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [0] p2) d2 m3:(Move [n] p3 _ mem)))
29860	// cond: m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)
29861	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 mem))
29862	for {
29863		t1 := auxToType(v.Aux)
29864		op1 := v_0
29865		if op1.Op != OpOffPtr {
29866			break
29867		}
29868		o1 := auxIntToInt64(op1.AuxInt)
29869		p1 := op1.Args[0]
29870		d1 := v_1
29871		m2 := v_2
29872		if m2.Op != OpStore {
29873			break
29874		}
29875		t2 := auxToType(m2.Aux)
29876		_ = m2.Args[2]
29877		op2 := m2.Args[0]
29878		if op2.Op != OpOffPtr || auxIntToInt64(op2.AuxInt) != 0 {
29879			break
29880		}
29881		p2 := op2.Args[0]
29882		d2 := m2.Args[1]
29883		m3 := m2.Args[2]
29884		if m3.Op != OpMove {
29885			break
29886		}
29887		n := auxIntToInt64(m3.AuxInt)
29888		mem := m3.Args[2]
29889		p3 := m3.Args[0]
29890		if !(m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)) {
29891			break
29892		}
29893		v.reset(OpStore)
29894		v.Aux = typeToAux(t1)
29895		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29896		v0.Aux = typeToAux(t2)
29897		v0.AddArg3(op2, d2, mem)
29898		v.AddArg3(op1, d1, v0)
29899		return true
29900	}
29901	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [0] p3) d3 m4:(Move [n] p4 _ mem))))
29902	// cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == t3.Size() && o1-o2 == t2.Size() && n == t3.Size() + t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2, m3, m4)
29903	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 mem)))
29904	for {
29905		t1 := auxToType(v.Aux)
29906		op1 := v_0
29907		if op1.Op != OpOffPtr {
29908			break
29909		}
29910		o1 := auxIntToInt64(op1.AuxInt)
29911		p1 := op1.Args[0]
29912		d1 := v_1
29913		m2 := v_2
29914		if m2.Op != OpStore {
29915			break
29916		}
29917		t2 := auxToType(m2.Aux)
29918		_ = m2.Args[2]
29919		op2 := m2.Args[0]
29920		if op2.Op != OpOffPtr {
29921			break
29922		}
29923		o2 := auxIntToInt64(op2.AuxInt)
29924		p2 := op2.Args[0]
29925		d2 := m2.Args[1]
29926		m3 := m2.Args[2]
29927		if m3.Op != OpStore {
29928			break
29929		}
29930		t3 := auxToType(m3.Aux)
29931		_ = m3.Args[2]
29932		op3 := m3.Args[0]
29933		if op3.Op != OpOffPtr || auxIntToInt64(op3.AuxInt) != 0 {
29934			break
29935		}
29936		p3 := op3.Args[0]
29937		d3 := m3.Args[1]
29938		m4 := m3.Args[2]
29939		if m4.Op != OpMove {
29940			break
29941		}
29942		n := auxIntToInt64(m4.AuxInt)
29943		mem := m4.Args[2]
29944		p4 := m4.Args[0]
29945		if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == t3.Size() && o1-o2 == t2.Size() && n == t3.Size()+t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2, m3, m4)) {
29946			break
29947		}
29948		v.reset(OpStore)
29949		v.Aux = typeToAux(t1)
29950		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29951		v0.Aux = typeToAux(t2)
29952		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29953		v1.Aux = typeToAux(t3)
29954		v1.AddArg3(op3, d3, mem)
29955		v0.AddArg3(op2, d2, v1)
29956		v.AddArg3(op1, d1, v0)
29957		return true
29958	}
29959	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [o3] p3) d3 m4:(Store {t4} op4:(OffPtr [0] p4) d4 m5:(Move [n] p5 _ mem)))))
29960	// cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == t4.Size() && o2-o3 == t3.Size() && o1-o2 == t2.Size() && n == t4.Size() + t3.Size() + t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2, m3, m4, m5)
29961	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 (Store {t4} op4 d4 mem))))
29962	for {
29963		t1 := auxToType(v.Aux)
29964		op1 := v_0
29965		if op1.Op != OpOffPtr {
29966			break
29967		}
29968		o1 := auxIntToInt64(op1.AuxInt)
29969		p1 := op1.Args[0]
29970		d1 := v_1
29971		m2 := v_2
29972		if m2.Op != OpStore {
29973			break
29974		}
29975		t2 := auxToType(m2.Aux)
29976		_ = m2.Args[2]
29977		op2 := m2.Args[0]
29978		if op2.Op != OpOffPtr {
29979			break
29980		}
29981		o2 := auxIntToInt64(op2.AuxInt)
29982		p2 := op2.Args[0]
29983		d2 := m2.Args[1]
29984		m3 := m2.Args[2]
29985		if m3.Op != OpStore {
29986			break
29987		}
29988		t3 := auxToType(m3.Aux)
29989		_ = m3.Args[2]
29990		op3 := m3.Args[0]
29991		if op3.Op != OpOffPtr {
29992			break
29993		}
29994		o3 := auxIntToInt64(op3.AuxInt)
29995		p3 := op3.Args[0]
29996		d3 := m3.Args[1]
29997		m4 := m3.Args[2]
29998		if m4.Op != OpStore {
29999			break
30000		}
30001		t4 := auxToType(m4.Aux)
30002		_ = m4.Args[2]
30003		op4 := m4.Args[0]
30004		if op4.Op != OpOffPtr || auxIntToInt64(op4.AuxInt) != 0 {
30005			break
30006		}
30007		p4 := op4.Args[0]
30008		d4 := m4.Args[1]
30009		m5 := m4.Args[2]
30010		if m5.Op != OpMove {
30011			break
30012		}
30013		n := auxIntToInt64(m5.AuxInt)
30014		mem := m5.Args[2]
30015		p5 := m5.Args[0]
30016		if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == t4.Size() && o2-o3 == t3.Size() && o1-o2 == t2.Size() && n == t4.Size()+t3.Size()+t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2, m3, m4, m5)) {
30017			break
30018		}
30019		v.reset(OpStore)
30020		v.Aux = typeToAux(t1)
30021		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
30022		v0.Aux = typeToAux(t2)
30023		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
30024		v1.Aux = typeToAux(t3)
30025		v2 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
30026		v2.Aux = typeToAux(t4)
30027		v2.AddArg3(op4, d4, mem)
30028		v1.AddArg3(op3, d3, v2)
30029		v0.AddArg3(op2, d2, v1)
30030		v.AddArg3(op1, d1, v0)
30031		return true
30032	}
30033	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [0] p2) d2 m3:(Zero [n] p3 mem)))
30034	// cond: m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)
30035	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 mem))
30036	for {
30037		t1 := auxToType(v.Aux)
30038		op1 := v_0
30039		if op1.Op != OpOffPtr {
30040			break
30041		}
30042		o1 := auxIntToInt64(op1.AuxInt)
30043		p1 := op1.Args[0]
30044		d1 := v_1
30045		m2 := v_2
30046		if m2.Op != OpStore {
30047			break
30048		}
30049		t2 := auxToType(m2.Aux)
30050		_ = m2.Args[2]
30051		op2 := m2.Args[0]
30052		if op2.Op != OpOffPtr || auxIntToInt64(op2.AuxInt) != 0 {
30053			break
30054		}
30055		p2 := op2.Args[0]
30056		d2 := m2.Args[1]
30057		m3 := m2.Args[2]
30058		if m3.Op != OpZero {
30059			break
30060		}
30061		n := auxIntToInt64(m3.AuxInt)
30062		mem := m3.Args[1]
30063		p3 := m3.Args[0]
30064		if !(m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)) {
30065			break
30066		}
30067		v.reset(OpStore)
30068		v.Aux = typeToAux(t1)
30069		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
30070		v0.Aux = typeToAux(t2)
30071		v0.AddArg3(op2, d2, mem)
30072		v.AddArg3(op1, d1, v0)
30073		return true
30074	}
30075	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [0] p3) d3 m4:(Zero [n] p4 mem))))
30076	// cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == t3.Size() && o1-o2 == t2.Size() && n == t3.Size() + t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2, m3, m4)
30077	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 mem)))
30078	for {
30079		t1 := auxToType(v.Aux)
30080		op1 := v_0
30081		if op1.Op != OpOffPtr {
30082			break
30083		}
30084		o1 := auxIntToInt64(op1.AuxInt)
30085		p1 := op1.Args[0]
30086		d1 := v_1
30087		m2 := v_2
30088		if m2.Op != OpStore {
30089			break
30090		}
30091		t2 := auxToType(m2.Aux)
30092		_ = m2.Args[2]
30093		op2 := m2.Args[0]
30094		if op2.Op != OpOffPtr {
30095			break
30096		}
30097		o2 := auxIntToInt64(op2.AuxInt)
30098		p2 := op2.Args[0]
30099		d2 := m2.Args[1]
30100		m3 := m2.Args[2]
30101		if m3.Op != OpStore {
30102			break
30103		}
30104		t3 := auxToType(m3.Aux)
30105		_ = m3.Args[2]
30106		op3 := m3.Args[0]
30107		if op3.Op != OpOffPtr || auxIntToInt64(op3.AuxInt) != 0 {
30108			break
30109		}
30110		p3 := op3.Args[0]
30111		d3 := m3.Args[1]
30112		m4 := m3.Args[2]
30113		if m4.Op != OpZero {
30114			break
30115		}
30116		n := auxIntToInt64(m4.AuxInt)
30117		mem := m4.Args[1]
30118		p4 := m4.Args[0]
30119		if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == t3.Size() && o1-o2 == t2.Size() && n == t3.Size()+t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2, m3, m4)) {
30120			break
30121		}
30122		v.reset(OpStore)
30123		v.Aux = typeToAux(t1)
30124		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
30125		v0.Aux = typeToAux(t2)
30126		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
30127		v1.Aux = typeToAux(t3)
30128		v1.AddArg3(op3, d3, mem)
30129		v0.AddArg3(op2, d2, v1)
30130		v.AddArg3(op1, d1, v0)
30131		return true
30132	}
30133	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [o3] p3) d3 m4:(Store {t4} op4:(OffPtr [0] p4) d4 m5:(Zero [n] p5 mem)))))
30134	// cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == t4.Size() && o2-o3 == t3.Size() && o1-o2 == t2.Size() && n == t4.Size() + t3.Size() + t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2, m3, m4, m5)
30135	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 (Store {t4} op4 d4 mem))))
30136	for {
30137		t1 := auxToType(v.Aux)
30138		op1 := v_0
30139		if op1.Op != OpOffPtr {
30140			break
30141		}
30142		o1 := auxIntToInt64(op1.AuxInt)
30143		p1 := op1.Args[0]
30144		d1 := v_1
30145		m2 := v_2
30146		if m2.Op != OpStore {
30147			break
30148		}
30149		t2 := auxToType(m2.Aux)
30150		_ = m2.Args[2]
30151		op2 := m2.Args[0]
30152		if op2.Op != OpOffPtr {
30153			break
30154		}
30155		o2 := auxIntToInt64(op2.AuxInt)
30156		p2 := op2.Args[0]
30157		d2 := m2.Args[1]
30158		m3 := m2.Args[2]
30159		if m3.Op != OpStore {
30160			break
30161		}
30162		t3 := auxToType(m3.Aux)
30163		_ = m3.Args[2]
30164		op3 := m3.Args[0]
30165		if op3.Op != OpOffPtr {
30166			break
30167		}
30168		o3 := auxIntToInt64(op3.AuxInt)
30169		p3 := op3.Args[0]
30170		d3 := m3.Args[1]
30171		m4 := m3.Args[2]
30172		if m4.Op != OpStore {
30173			break
30174		}
30175		t4 := auxToType(m4.Aux)
30176		_ = m4.Args[2]
30177		op4 := m4.Args[0]
30178		if op4.Op != OpOffPtr || auxIntToInt64(op4.AuxInt) != 0 {
30179			break
30180		}
30181		p4 := op4.Args[0]
30182		d4 := m4.Args[1]
30183		m5 := m4.Args[2]
30184		if m5.Op != OpZero {
30185			break
30186		}
30187		n := auxIntToInt64(m5.AuxInt)
30188		mem := m5.Args[1]
30189		p5 := m5.Args[0]
30190		if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == t4.Size() && o2-o3 == t3.Size() && o1-o2 == t2.Size() && n == t4.Size()+t3.Size()+t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2, m3, m4, m5)) {
30191			break
30192		}
30193		v.reset(OpStore)
30194		v.Aux = typeToAux(t1)
30195		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
30196		v0.Aux = typeToAux(t2)
30197		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
30198		v1.Aux = typeToAux(t3)
30199		v2 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
30200		v2.Aux = typeToAux(t4)
30201		v2.AddArg3(op4, d4, mem)
30202		v1.AddArg3(op3, d3, v2)
30203		v0.AddArg3(op2, d2, v1)
30204		v.AddArg3(op1, d1, v0)
30205		return true
30206	}
30207	return false
30208}
30209func rewriteValuegeneric_OpStringLen(v *Value) bool {
30210	v_0 := v.Args[0]
30211	// match: (StringLen (StringMake _ (Const64 <t> [c])))
30212	// result: (Const64 <t> [c])
30213	for {
30214		if v_0.Op != OpStringMake {
30215			break
30216		}
30217		_ = v_0.Args[1]
30218		v_0_1 := v_0.Args[1]
30219		if v_0_1.Op != OpConst64 {
30220			break
30221		}
30222		t := v_0_1.Type
30223		c := auxIntToInt64(v_0_1.AuxInt)
30224		v.reset(OpConst64)
30225		v.Type = t
30226		v.AuxInt = int64ToAuxInt(c)
30227		return true
30228	}
30229	return false
30230}
30231func rewriteValuegeneric_OpStringPtr(v *Value) bool {
30232	v_0 := v.Args[0]
30233	// match: (StringPtr (StringMake (Addr <t> {s} base) _))
30234	// result: (Addr <t> {s} base)
30235	for {
30236		if v_0.Op != OpStringMake {
30237			break
30238		}
30239		v_0_0 := v_0.Args[0]
30240		if v_0_0.Op != OpAddr {
30241			break
30242		}
30243		t := v_0_0.Type
30244		s := auxToSym(v_0_0.Aux)
30245		base := v_0_0.Args[0]
30246		v.reset(OpAddr)
30247		v.Type = t
30248		v.Aux = symToAux(s)
30249		v.AddArg(base)
30250		return true
30251	}
30252	return false
30253}
30254func rewriteValuegeneric_OpStructSelect(v *Value) bool {
30255	v_0 := v.Args[0]
30256	b := v.Block
30257	// match: (StructSelect (StructMake1 x))
30258	// result: x
30259	for {
30260		if v_0.Op != OpStructMake1 {
30261			break
30262		}
30263		x := v_0.Args[0]
30264		v.copyOf(x)
30265		return true
30266	}
30267	// match: (StructSelect [0] (StructMake2 x _))
30268	// result: x
30269	for {
30270		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpStructMake2 {
30271			break
30272		}
30273		x := v_0.Args[0]
30274		v.copyOf(x)
30275		return true
30276	}
30277	// match: (StructSelect [1] (StructMake2 _ x))
30278	// result: x
30279	for {
30280		if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStructMake2 {
30281			break
30282		}
30283		x := v_0.Args[1]
30284		v.copyOf(x)
30285		return true
30286	}
30287	// match: (StructSelect [0] (StructMake3 x _ _))
30288	// result: x
30289	for {
30290		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpStructMake3 {
30291			break
30292		}
30293		x := v_0.Args[0]
30294		v.copyOf(x)
30295		return true
30296	}
30297	// match: (StructSelect [1] (StructMake3 _ x _))
30298	// result: x
30299	for {
30300		if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStructMake3 {
30301			break
30302		}
30303		x := v_0.Args[1]
30304		v.copyOf(x)
30305		return true
30306	}
30307	// match: (StructSelect [2] (StructMake3 _ _ x))
30308	// result: x
30309	for {
30310		if auxIntToInt64(v.AuxInt) != 2 || v_0.Op != OpStructMake3 {
30311			break
30312		}
30313		x := v_0.Args[2]
30314		v.copyOf(x)
30315		return true
30316	}
30317	// match: (StructSelect [0] (StructMake4 x _ _ _))
30318	// result: x
30319	for {
30320		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpStructMake4 {
30321			break
30322		}
30323		x := v_0.Args[0]
30324		v.copyOf(x)
30325		return true
30326	}
30327	// match: (StructSelect [1] (StructMake4 _ x _ _))
30328	// result: x
30329	for {
30330		if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStructMake4 {
30331			break
30332		}
30333		x := v_0.Args[1]
30334		v.copyOf(x)
30335		return true
30336	}
30337	// match: (StructSelect [2] (StructMake4 _ _ x _))
30338	// result: x
30339	for {
30340		if auxIntToInt64(v.AuxInt) != 2 || v_0.Op != OpStructMake4 {
30341			break
30342		}
30343		x := v_0.Args[2]
30344		v.copyOf(x)
30345		return true
30346	}
30347	// match: (StructSelect [3] (StructMake4 _ _ _ x))
30348	// result: x
30349	for {
30350		if auxIntToInt64(v.AuxInt) != 3 || v_0.Op != OpStructMake4 {
30351			break
30352		}
30353		x := v_0.Args[3]
30354		v.copyOf(x)
30355		return true
30356	}
30357	// match: (StructSelect [i] x:(Load <t> ptr mem))
30358	// cond: !CanSSA(t)
30359	// result: @x.Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(int(i))] ptr) mem)
30360	for {
30361		i := auxIntToInt64(v.AuxInt)
30362		x := v_0
30363		if x.Op != OpLoad {
30364			break
30365		}
30366		t := x.Type
30367		mem := x.Args[1]
30368		ptr := x.Args[0]
30369		if !(!CanSSA(t)) {
30370			break
30371		}
30372		b = x.Block
30373		v0 := b.NewValue0(v.Pos, OpLoad, v.Type)
30374		v.copyOf(v0)
30375		v1 := b.NewValue0(v.Pos, OpOffPtr, v.Type.PtrTo())
30376		v1.AuxInt = int64ToAuxInt(t.FieldOff(int(i)))
30377		v1.AddArg(ptr)
30378		v0.AddArg2(v1, mem)
30379		return true
30380	}
30381	// match: (StructSelect [0] (IData x))
30382	// result: (IData x)
30383	for {
30384		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpIData {
30385			break
30386		}
30387		x := v_0.Args[0]
30388		v.reset(OpIData)
30389		v.AddArg(x)
30390		return true
30391	}
30392	return false
30393}
// rewriteValuegeneric_OpSub16 applies the generic rewrite rules for Sub16.
// Each "for { ... }" block below attempts one rule (documented by its
// match/cond/result comment); rules are tried in source order and the first
// successful one rewrites v in place and returns true. The nested
// "_i0"/"_i1" counting loops enumerate both argument orders of commutative
// operands (Add16, Mul16) by swapping the two argument views on the second
// iteration. Returns false if no rule matched.
func rewriteValuegeneric_OpSub16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Sub16 (Const16 [c]) (Const16 [d]))
	// result: (Const16 [c-d])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if v_1.Op != OpConst16 {
			break
		}
		d := auxIntToInt16(v_1.AuxInt)
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(c - d)
		return true
	}
	// match: (Sub16 x (Const16 <t> [c]))
	// cond: x.Op != OpConst16
	// result: (Add16 (Const16 <t> [-c]) x)
	for {
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		c := auxIntToInt16(v_1.AuxInt)
		if !(x.Op != OpConst16) {
			break
		}
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(-c)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Sub16 <t> (Mul16 x y) (Mul16 x z))
	// result: (Mul16 x (Sub16 <t> y z))
	for {
		t := v.Type
		if v_0.Op != OpMul16 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// Try both argument orders of the commutative Mul16 operands.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			y := v_0_1
			if v_1.Op != OpMul16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				z := v_1_1
				v.reset(OpMul16)
				v0 := b.NewValue0(v.Pos, OpSub16, t)
				v0.AddArg2(y, z)
				v.AddArg2(x, v0)
				return true
			}
		}
		break
	}
	// match: (Sub16 x x)
	// result: (Const16 [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	// match: (Sub16 (Neg16 x) (Com16 x))
	// result: (Const16 [1])
	for {
		if v_0.Op != OpNeg16 {
			break
		}
		x := v_0.Args[0]
		if v_1.Op != OpCom16 || x != v_1.Args[0] {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(1)
		return true
	}
	// match: (Sub16 (Com16 x) (Neg16 x))
	// result: (Const16 [-1])
	for {
		if v_0.Op != OpCom16 {
			break
		}
		x := v_0.Args[0]
		if v_1.Op != OpNeg16 || x != v_1.Args[0] {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(-1)
		return true
	}
	// match: (Sub16 (Add16 t x) (Add16 t y))
	// result: (Sub16 x y)
	for {
		if v_0.Op != OpAdd16 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			t := v_0_0
			x := v_0_1
			if v_1.Op != OpAdd16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if t != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpSub16)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (Sub16 (Add16 x y) x)
	// result: y
	for {
		if v_0.Op != OpAdd16 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			y := v_0_1
			if x != v_1 {
				continue
			}
			v.copyOf(y)
			return true
		}
		break
	}
	// match: (Sub16 (Add16 x y) y)
	// result: x
	for {
		if v_0.Op != OpAdd16 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			y := v_0_1
			if y != v_1 {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Sub16 (Sub16 x y) x)
	// result: (Neg16 y)
	for {
		if v_0.Op != OpSub16 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpNeg16)
		v.AddArg(y)
		return true
	}
	// match: (Sub16 x (Add16 x y))
	// result: (Neg16 y)
	for {
		x := v_0
		if v_1.Op != OpAdd16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			if x != v_1_0 {
				continue
			}
			y := v_1_1
			v.reset(OpNeg16)
			v.AddArg(y)
			return true
		}
		break
	}
	// The remaining rules reassociate constants outward so later passes can
	// fold them; each cond excludes all-constant operands already handled by
	// the constant-folding rules above.
	// match: (Sub16 x (Sub16 i:(Const16 <t>) z))
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Sub16 (Add16 <t> x z) i)
	for {
		x := v_0
		if v_1.Op != OpSub16 {
			break
		}
		z := v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpSub16)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg2(x, z)
		v.AddArg2(v0, i)
		return true
	}
	// match: (Sub16 x (Add16 z i:(Const16 <t>)))
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Sub16 (Sub16 <t> x z) i)
	for {
		x := v_0
		if v_1.Op != OpAdd16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			z := v_1_0
			i := v_1_1
			if i.Op != OpConst16 {
				continue
			}
			t := i.Type
			if !(z.Op != OpConst16 && x.Op != OpConst16) {
				continue
			}
			v.reset(OpSub16)
			v0 := b.NewValue0(v.Pos, OpSub16, t)
			v0.AddArg2(x, z)
			v.AddArg2(v0, i)
			return true
		}
		break
	}
	// match: (Sub16 (Sub16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Sub16 i (Add16 <t> z x))
	for {
		if v_0.Op != OpSub16 {
			break
		}
		z := v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		x := v_1
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpSub16)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg2(z, x)
		v.AddArg2(i, v0)
		return true
	}
	// match: (Sub16 (Add16 z i:(Const16 <t>)) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Sub16 <t> z x))
	for {
		if v_0.Op != OpAdd16 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			z := v_0_0
			i := v_0_1
			if i.Op != OpConst16 {
				continue
			}
			t := i.Type
			x := v_1
			if !(z.Op != OpConst16 && x.Op != OpConst16) {
				continue
			}
			v.reset(OpAdd16)
			v0 := b.NewValue0(v.Pos, OpSub16, t)
			v0.AddArg2(z, x)
			v.AddArg2(i, v0)
			return true
		}
		break
	}
	// match: (Sub16 (Const16 <t> [c]) (Sub16 (Const16 <t> [d]) x))
	// result: (Add16 (Const16 <t> [c-d]) x)
	for {
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		c := auxIntToInt16(v_0.AuxInt)
		if v_1.Op != OpSub16 {
			break
		}
		x := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst16 || v_1_0.Type != t {
			break
		}
		d := auxIntToInt16(v_1_0.AuxInt)
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int16ToAuxInt(c - d)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Sub16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
	// result: (Sub16 (Const16 <t> [c-d]) x)
	for {
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		c := auxIntToInt16(v_0.AuxInt)
		if v_1.Op != OpAdd16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			if v_1_0.Op != OpConst16 || v_1_0.Type != t {
				continue
			}
			d := auxIntToInt16(v_1_0.AuxInt)
			x := v_1_1
			v.reset(OpSub16)
			v0 := b.NewValue0(v.Pos, OpConst16, t)
			v0.AuxInt = int16ToAuxInt(c - d)
			v.AddArg2(v0, x)
			return true
		}
		break
	}
	return false
}
30767func rewriteValuegeneric_OpSub32(v *Value) bool {
30768	v_1 := v.Args[1]
30769	v_0 := v.Args[0]
30770	b := v.Block
30771	// match: (Sub32 (Const32 [c]) (Const32 [d]))
30772	// result: (Const32 [c-d])
30773	for {
30774		if v_0.Op != OpConst32 {
30775			break
30776		}
30777		c := auxIntToInt32(v_0.AuxInt)
30778		if v_1.Op != OpConst32 {
30779			break
30780		}
30781		d := auxIntToInt32(v_1.AuxInt)
30782		v.reset(OpConst32)
30783		v.AuxInt = int32ToAuxInt(c - d)
30784		return true
30785	}
30786	// match: (Sub32 x (Const32 <t> [c]))
30787	// cond: x.Op != OpConst32
30788	// result: (Add32 (Const32 <t> [-c]) x)
30789	for {
30790		x := v_0
30791		if v_1.Op != OpConst32 {
30792			break
30793		}
30794		t := v_1.Type
30795		c := auxIntToInt32(v_1.AuxInt)
30796		if !(x.Op != OpConst32) {
30797			break
30798		}
30799		v.reset(OpAdd32)
30800		v0 := b.NewValue0(v.Pos, OpConst32, t)
30801		v0.AuxInt = int32ToAuxInt(-c)
30802		v.AddArg2(v0, x)
30803		return true
30804	}
30805	// match: (Sub32 <t> (Mul32 x y) (Mul32 x z))
30806	// result: (Mul32 x (Sub32 <t> y z))
30807	for {
30808		t := v.Type
30809		if v_0.Op != OpMul32 {
30810			break
30811		}
30812		_ = v_0.Args[1]
30813		v_0_0 := v_0.Args[0]
30814		v_0_1 := v_0.Args[1]
30815		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30816			x := v_0_0
30817			y := v_0_1
30818			if v_1.Op != OpMul32 {
30819				continue
30820			}
30821			_ = v_1.Args[1]
30822			v_1_0 := v_1.Args[0]
30823			v_1_1 := v_1.Args[1]
30824			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
30825				if x != v_1_0 {
30826					continue
30827				}
30828				z := v_1_1
30829				v.reset(OpMul32)
30830				v0 := b.NewValue0(v.Pos, OpSub32, t)
30831				v0.AddArg2(y, z)
30832				v.AddArg2(x, v0)
30833				return true
30834			}
30835		}
30836		break
30837	}
30838	// match: (Sub32 x x)
30839	// result: (Const32 [0])
30840	for {
30841		x := v_0
30842		if x != v_1 {
30843			break
30844		}
30845		v.reset(OpConst32)
30846		v.AuxInt = int32ToAuxInt(0)
30847		return true
30848	}
30849	// match: (Sub32 (Neg32 x) (Com32 x))
30850	// result: (Const32 [1])
30851	for {
30852		if v_0.Op != OpNeg32 {
30853			break
30854		}
30855		x := v_0.Args[0]
30856		if v_1.Op != OpCom32 || x != v_1.Args[0] {
30857			break
30858		}
30859		v.reset(OpConst32)
30860		v.AuxInt = int32ToAuxInt(1)
30861		return true
30862	}
30863	// match: (Sub32 (Com32 x) (Neg32 x))
30864	// result: (Const32 [-1])
30865	for {
30866		if v_0.Op != OpCom32 {
30867			break
30868		}
30869		x := v_0.Args[0]
30870		if v_1.Op != OpNeg32 || x != v_1.Args[0] {
30871			break
30872		}
30873		v.reset(OpConst32)
30874		v.AuxInt = int32ToAuxInt(-1)
30875		return true
30876	}
30877	// match: (Sub32 (Add32 t x) (Add32 t y))
30878	// result: (Sub32 x y)
30879	for {
30880		if v_0.Op != OpAdd32 {
30881			break
30882		}
30883		_ = v_0.Args[1]
30884		v_0_0 := v_0.Args[0]
30885		v_0_1 := v_0.Args[1]
30886		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30887			t := v_0_0
30888			x := v_0_1
30889			if v_1.Op != OpAdd32 {
30890				continue
30891			}
30892			_ = v_1.Args[1]
30893			v_1_0 := v_1.Args[0]
30894			v_1_1 := v_1.Args[1]
30895			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
30896				if t != v_1_0 {
30897					continue
30898				}
30899				y := v_1_1
30900				v.reset(OpSub32)
30901				v.AddArg2(x, y)
30902				return true
30903			}
30904		}
30905		break
30906	}
30907	// match: (Sub32 (Add32 x y) x)
30908	// result: y
30909	for {
30910		if v_0.Op != OpAdd32 {
30911			break
30912		}
30913		_ = v_0.Args[1]
30914		v_0_0 := v_0.Args[0]
30915		v_0_1 := v_0.Args[1]
30916		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30917			x := v_0_0
30918			y := v_0_1
30919			if x != v_1 {
30920				continue
30921			}
30922			v.copyOf(y)
30923			return true
30924		}
30925		break
30926	}
30927	// match: (Sub32 (Add32 x y) y)
30928	// result: x
30929	for {
30930		if v_0.Op != OpAdd32 {
30931			break
30932		}
30933		_ = v_0.Args[1]
30934		v_0_0 := v_0.Args[0]
30935		v_0_1 := v_0.Args[1]
30936		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30937			x := v_0_0
30938			y := v_0_1
30939			if y != v_1 {
30940				continue
30941			}
30942			v.copyOf(x)
30943			return true
30944		}
30945		break
30946	}
30947	// match: (Sub32 (Sub32 x y) x)
30948	// result: (Neg32 y)
30949	for {
30950		if v_0.Op != OpSub32 {
30951			break
30952		}
30953		y := v_0.Args[1]
30954		x := v_0.Args[0]
30955		if x != v_1 {
30956			break
30957		}
30958		v.reset(OpNeg32)
30959		v.AddArg(y)
30960		return true
30961	}
30962	// match: (Sub32 x (Add32 x y))
30963	// result: (Neg32 y)
30964	for {
30965		x := v_0
30966		if v_1.Op != OpAdd32 {
30967			break
30968		}
30969		_ = v_1.Args[1]
30970		v_1_0 := v_1.Args[0]
30971		v_1_1 := v_1.Args[1]
30972		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
30973			if x != v_1_0 {
30974				continue
30975			}
30976			y := v_1_1
30977			v.reset(OpNeg32)
30978			v.AddArg(y)
30979			return true
30980		}
30981		break
30982	}
30983	// match: (Sub32 x (Sub32 i:(Const32 <t>) z))
30984	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
30985	// result: (Sub32 (Add32 <t> x z) i)
30986	for {
30987		x := v_0
30988		if v_1.Op != OpSub32 {
30989			break
30990		}
30991		z := v_1.Args[1]
30992		i := v_1.Args[0]
30993		if i.Op != OpConst32 {
30994			break
30995		}
30996		t := i.Type
30997		if !(z.Op != OpConst32 && x.Op != OpConst32) {
30998			break
30999		}
31000		v.reset(OpSub32)
31001		v0 := b.NewValue0(v.Pos, OpAdd32, t)
31002		v0.AddArg2(x, z)
31003		v.AddArg2(v0, i)
31004		return true
31005	}
31006	// match: (Sub32 x (Add32 z i:(Const32 <t>)))
31007	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
31008	// result: (Sub32 (Sub32 <t> x z) i)
31009	for {
31010		x := v_0
31011		if v_1.Op != OpAdd32 {
31012			break
31013		}
31014		_ = v_1.Args[1]
31015		v_1_0 := v_1.Args[0]
31016		v_1_1 := v_1.Args[1]
31017		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
31018			z := v_1_0
31019			i := v_1_1
31020			if i.Op != OpConst32 {
31021				continue
31022			}
31023			t := i.Type
31024			if !(z.Op != OpConst32 && x.Op != OpConst32) {
31025				continue
31026			}
31027			v.reset(OpSub32)
31028			v0 := b.NewValue0(v.Pos, OpSub32, t)
31029			v0.AddArg2(x, z)
31030			v.AddArg2(v0, i)
31031			return true
31032		}
31033		break
31034	}
31035	// match: (Sub32 (Sub32 i:(Const32 <t>) z) x)
31036	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
31037	// result: (Sub32 i (Add32 <t> z x))
31038	for {
31039		if v_0.Op != OpSub32 {
31040			break
31041		}
31042		z := v_0.Args[1]
31043		i := v_0.Args[0]
31044		if i.Op != OpConst32 {
31045			break
31046		}
31047		t := i.Type
31048		x := v_1
31049		if !(z.Op != OpConst32 && x.Op != OpConst32) {
31050			break
31051		}
31052		v.reset(OpSub32)
31053		v0 := b.NewValue0(v.Pos, OpAdd32, t)
31054		v0.AddArg2(z, x)
31055		v.AddArg2(i, v0)
31056		return true
31057	}
31058	// match: (Sub32 (Add32 z i:(Const32 <t>)) x)
31059	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
31060	// result: (Add32 i (Sub32 <t> z x))
31061	for {
31062		if v_0.Op != OpAdd32 {
31063			break
31064		}
31065		_ = v_0.Args[1]
31066		v_0_0 := v_0.Args[0]
31067		v_0_1 := v_0.Args[1]
31068		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
31069			z := v_0_0
31070			i := v_0_1
31071			if i.Op != OpConst32 {
31072				continue
31073			}
31074			t := i.Type
31075			x := v_1
31076			if !(z.Op != OpConst32 && x.Op != OpConst32) {
31077				continue
31078			}
31079			v.reset(OpAdd32)
31080			v0 := b.NewValue0(v.Pos, OpSub32, t)
31081			v0.AddArg2(z, x)
31082			v.AddArg2(i, v0)
31083			return true
31084		}
31085		break
31086	}
31087	// match: (Sub32 (Const32 <t> [c]) (Sub32 (Const32 <t> [d]) x))
31088	// result: (Add32 (Const32 <t> [c-d]) x)
31089	for {
31090		if v_0.Op != OpConst32 {
31091			break
31092		}
31093		t := v_0.Type
31094		c := auxIntToInt32(v_0.AuxInt)
31095		if v_1.Op != OpSub32 {
31096			break
31097		}
31098		x := v_1.Args[1]
31099		v_1_0 := v_1.Args[0]
31100		if v_1_0.Op != OpConst32 || v_1_0.Type != t {
31101			break
31102		}
31103		d := auxIntToInt32(v_1_0.AuxInt)
31104		v.reset(OpAdd32)
31105		v0 := b.NewValue0(v.Pos, OpConst32, t)
31106		v0.AuxInt = int32ToAuxInt(c - d)
31107		v.AddArg2(v0, x)
31108		return true
31109	}
31110	// match: (Sub32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
31111	// result: (Sub32 (Const32 <t> [c-d]) x)
31112	for {
31113		if v_0.Op != OpConst32 {
31114			break
31115		}
31116		t := v_0.Type
31117		c := auxIntToInt32(v_0.AuxInt)
31118		if v_1.Op != OpAdd32 {
31119			break
31120		}
31121		_ = v_1.Args[1]
31122		v_1_0 := v_1.Args[0]
31123		v_1_1 := v_1.Args[1]
31124		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
31125			if v_1_0.Op != OpConst32 || v_1_0.Type != t {
31126				continue
31127			}
31128			d := auxIntToInt32(v_1_0.AuxInt)
31129			x := v_1_1
31130			v.reset(OpSub32)
31131			v0 := b.NewValue0(v.Pos, OpConst32, t)
31132			v0.AuxInt = int32ToAuxInt(c - d)
31133			v.AddArg2(v0, x)
31134			return true
31135		}
31136		break
31137	}
31138	return false
31139}
// rewriteValuegeneric_OpSub32F rewrites v, an OpSub32F value, by applying
// the generic Sub32F constant-folding rule. It reports whether v was
// rewritten. (Generated code: do not hand-edit the logic.)
func rewriteValuegeneric_OpSub32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Sub32F (Const32F [c]) (Const32F [d]))
	// cond: c-d == c-d
	// result: (Const32F [c-d])
	for {
		if v_0.Op != OpConst32F {
			break
		}
		c := auxIntToFloat32(v_0.AuxInt)
		if v_1.Op != OpConst32F {
			break
		}
		d := auxIntToFloat32(v_1.AuxInt)
		// c-d == c-d is false exactly when c-d is NaN: folding is skipped
		// in that case so the NaN is produced at run time instead of
		// being baked into a constant.
		if !(c-d == c-d) {
			break
		}
		v.reset(OpConst32F)
		v.AuxInt = float32ToAuxInt(c - d)
		return true
	}
	return false
}
// rewriteValuegeneric_OpSub64 rewrites v, an OpSub64 value, by trying each
// generic Sub64 rule in order: constant folding, canonicalization of
// x-const into const+x, algebraic identities (x-x, distributing over Mul64,
// cancellation against Add64/Sub64), and reassociation that pushes Const64
// operands outward so later passes can fold them. It reports whether v was
// rewritten. Each "for { ... }" block attempts exactly one rule and breaks
// out on the first mismatch; the inner "_i0"/"_i1" loops retry commutative
// sub-expressions with their two arguments swapped.
func rewriteValuegeneric_OpSub64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// b is the enclosing block, needed to allocate new values below.
	b := v.Block
	// match: (Sub64 (Const64 [c]) (Const64 [d]))
	// result: (Const64 [c-d])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpConst64 {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(c - d)
		return true
	}
	// match: (Sub64 x (Const64 <t> [c]))
	// cond: x.Op != OpConst64
	// result: (Add64 (Const64 <t> [-c]) x)
	for {
		x := v_0
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := auxIntToInt64(v_1.AuxInt)
		if !(x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(-c)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Sub64 <t> (Mul64 x y) (Mul64 x z))
	// result: (Mul64 x (Sub64 <t> y z))
	for {
		t := v.Type
		if v_0.Op != OpMul64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// Try both argument orders of the commutative Mul64 operands.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			y := v_0_1
			if v_1.Op != OpMul64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				z := v_1_1
				v.reset(OpMul64)
				v0 := b.NewValue0(v.Pos, OpSub64, t)
				v0.AddArg2(y, z)
				v.AddArg2(x, v0)
				return true
			}
		}
		break
	}
	// match: (Sub64 x x)
	// result: (Const64 [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (Sub64 (Neg64 x) (Com64 x))
	// result: (Const64 [1])
	for {
		if v_0.Op != OpNeg64 {
			break
		}
		x := v_0.Args[0]
		if v_1.Op != OpCom64 || x != v_1.Args[0] {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (Sub64 (Com64 x) (Neg64 x))
	// result: (Const64 [-1])
	for {
		if v_0.Op != OpCom64 {
			break
		}
		x := v_0.Args[0]
		if v_1.Op != OpNeg64 || x != v_1.Args[0] {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (Sub64 (Add64 t x) (Add64 t y))
	// result: (Sub64 x y)
	for {
		if v_0.Op != OpAdd64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			t := v_0_0
			x := v_0_1
			if v_1.Op != OpAdd64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if t != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpSub64)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (Sub64 (Add64 x y) x)
	// result: y
	for {
		if v_0.Op != OpAdd64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			y := v_0_1
			if x != v_1 {
				continue
			}
			v.copyOf(y)
			return true
		}
		break
	}
	// match: (Sub64 (Add64 x y) y)
	// result: x
	for {
		if v_0.Op != OpAdd64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			y := v_0_1
			if y != v_1 {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Sub64 (Sub64 x y) x)
	// result: (Neg64 y)
	for {
		if v_0.Op != OpSub64 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpNeg64)
		v.AddArg(y)
		return true
	}
	// match: (Sub64 x (Add64 x y))
	// result: (Neg64 y)
	for {
		x := v_0
		if v_1.Op != OpAdd64 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			if x != v_1_0 {
				continue
			}
			y := v_1_1
			v.reset(OpNeg64)
			v.AddArg(y)
			return true
		}
		break
	}
	// The remaining rules reassociate so that the Const64 operand i moves
	// outward; the conds exclude all-constant forms already handled above.
	// match: (Sub64 x (Sub64 i:(Const64 <t>) z))
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Sub64 (Add64 <t> x z) i)
	for {
		x := v_0
		if v_1.Op != OpSub64 {
			break
		}
		z := v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpSub64)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg2(x, z)
		v.AddArg2(v0, i)
		return true
	}
	// match: (Sub64 x (Add64 z i:(Const64 <t>)))
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Sub64 (Sub64 <t> x z) i)
	for {
		x := v_0
		if v_1.Op != OpAdd64 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			z := v_1_0
			i := v_1_1
			if i.Op != OpConst64 {
				continue
			}
			t := i.Type
			if !(z.Op != OpConst64 && x.Op != OpConst64) {
				continue
			}
			v.reset(OpSub64)
			v0 := b.NewValue0(v.Pos, OpSub64, t)
			v0.AddArg2(x, z)
			v.AddArg2(v0, i)
			return true
		}
		break
	}
	// match: (Sub64 (Sub64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Sub64 i (Add64 <t> z x))
	for {
		if v_0.Op != OpSub64 {
			break
		}
		z := v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		x := v_1
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpSub64)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg2(z, x)
		v.AddArg2(i, v0)
		return true
	}
	// match: (Sub64 (Add64 z i:(Const64 <t>)) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Sub64 <t> z x))
	for {
		if v_0.Op != OpAdd64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			z := v_0_0
			i := v_0_1
			if i.Op != OpConst64 {
				continue
			}
			t := i.Type
			x := v_1
			if !(z.Op != OpConst64 && x.Op != OpConst64) {
				continue
			}
			v.reset(OpAdd64)
			v0 := b.NewValue0(v.Pos, OpSub64, t)
			v0.AddArg2(z, x)
			v.AddArg2(i, v0)
			return true
		}
		break
	}
	// match: (Sub64 (Const64 <t> [c]) (Sub64 (Const64 <t> [d]) x))
	// result: (Add64 (Const64 <t> [c-d]) x)
	for {
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpSub64 {
			break
		}
		x := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst64 || v_1_0.Type != t {
			break
		}
		d := auxIntToInt64(v_1_0.AuxInt)
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = int64ToAuxInt(c - d)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Sub64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
	// result: (Sub64 (Const64 <t> [c-d]) x)
	for {
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpAdd64 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			if v_1_0.Op != OpConst64 || v_1_0.Type != t {
				continue
			}
			d := auxIntToInt64(v_1_0.AuxInt)
			x := v_1_1
			v.reset(OpSub64)
			v0 := b.NewValue0(v.Pos, OpConst64, t)
			v0.AuxInt = int64ToAuxInt(c - d)
			v.AddArg2(v0, x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpSub64F rewrites v, an OpSub64F value, by applying
// the generic Sub64F constant-folding rule. It reports whether v was
// rewritten. (Generated code: do not hand-edit the logic.)
func rewriteValuegeneric_OpSub64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Sub64F (Const64F [c]) (Const64F [d]))
	// cond: c-d == c-d
	// result: (Const64F [c-d])
	for {
		if v_0.Op != OpConst64F {
			break
		}
		c := auxIntToFloat64(v_0.AuxInt)
		if v_1.Op != OpConst64F {
			break
		}
		d := auxIntToFloat64(v_1.AuxInt)
		// c-d == c-d is false exactly when c-d is NaN: folding is skipped
		// in that case so the NaN is produced at run time instead of
		// being baked into a constant.
		if !(c-d == c-d) {
			break
		}
		v.reset(OpConst64F)
		v.AuxInt = float64ToAuxInt(c - d)
		return true
	}
	return false
}
// rewriteValuegeneric_OpSub8 rewrites v, an OpSub8 value, by trying each
// generic Sub8 rule in order: constant folding, canonicalization of
// x-const into const+x, algebraic identities (x-x, distributing over Mul8,
// cancellation against Add8/Sub8), and reassociation that pushes Const8
// operands outward so later passes can fold them. It reports whether v was
// rewritten. Each "for { ... }" block attempts exactly one rule and breaks
// out on the first mismatch; the inner "_i0"/"_i1" loops retry commutative
// sub-expressions with their two arguments swapped.
func rewriteValuegeneric_OpSub8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// b is the enclosing block, needed to allocate new values below.
	b := v.Block
	// match: (Sub8 (Const8 [c]) (Const8 [d]))
	// result: (Const8 [c-d])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		if v_1.Op != OpConst8 {
			break
		}
		d := auxIntToInt8(v_1.AuxInt)
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(c - d)
		return true
	}
	// match: (Sub8 x (Const8 <t> [c]))
	// cond: x.Op != OpConst8
	// result: (Add8 (Const8 <t> [-c]) x)
	for {
		x := v_0
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		c := auxIntToInt8(v_1.AuxInt)
		if !(x.Op != OpConst8) {
			break
		}
		v.reset(OpAdd8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(-c)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Sub8 <t> (Mul8 x y) (Mul8 x z))
	// result: (Mul8 x (Sub8 <t> y z))
	for {
		t := v.Type
		if v_0.Op != OpMul8 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// Try both argument orders of the commutative Mul8 operands.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			y := v_0_1
			if v_1.Op != OpMul8 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				z := v_1_1
				v.reset(OpMul8)
				v0 := b.NewValue0(v.Pos, OpSub8, t)
				v0.AddArg2(y, z)
				v.AddArg2(x, v0)
				return true
			}
		}
		break
	}
	// match: (Sub8 x x)
	// result: (Const8 [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(0)
		return true
	}
	// match: (Sub8 (Neg8 x) (Com8 x))
	// result: (Const8 [1])
	for {
		if v_0.Op != OpNeg8 {
			break
		}
		x := v_0.Args[0]
		if v_1.Op != OpCom8 || x != v_1.Args[0] {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(1)
		return true
	}
	// match: (Sub8 (Com8 x) (Neg8 x))
	// result: (Const8 [-1])
	for {
		if v_0.Op != OpCom8 {
			break
		}
		x := v_0.Args[0]
		if v_1.Op != OpNeg8 || x != v_1.Args[0] {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(-1)
		return true
	}
	// match: (Sub8 (Add8 t x) (Add8 t y))
	// result: (Sub8 x y)
	for {
		if v_0.Op != OpAdd8 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			t := v_0_0
			x := v_0_1
			if v_1.Op != OpAdd8 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if t != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpSub8)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (Sub8 (Add8 x y) x)
	// result: y
	for {
		if v_0.Op != OpAdd8 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			y := v_0_1
			if x != v_1 {
				continue
			}
			v.copyOf(y)
			return true
		}
		break
	}
	// match: (Sub8 (Add8 x y) y)
	// result: x
	for {
		if v_0.Op != OpAdd8 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			y := v_0_1
			if y != v_1 {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Sub8 (Sub8 x y) x)
	// result: (Neg8 y)
	for {
		if v_0.Op != OpSub8 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpNeg8)
		v.AddArg(y)
		return true
	}
	// match: (Sub8 x (Add8 x y))
	// result: (Neg8 y)
	for {
		x := v_0
		if v_1.Op != OpAdd8 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			if x != v_1_0 {
				continue
			}
			y := v_1_1
			v.reset(OpNeg8)
			v.AddArg(y)
			return true
		}
		break
	}
	// The remaining rules reassociate so that the Const8 operand i moves
	// outward; the conds exclude all-constant forms already handled above.
	// match: (Sub8 x (Sub8 i:(Const8 <t>) z))
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Sub8 (Add8 <t> x z) i)
	for {
		x := v_0
		if v_1.Op != OpSub8 {
			break
		}
		z := v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpSub8)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg2(x, z)
		v.AddArg2(v0, i)
		return true
	}
	// match: (Sub8 x (Add8 z i:(Const8 <t>)))
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Sub8 (Sub8 <t> x z) i)
	for {
		x := v_0
		if v_1.Op != OpAdd8 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			z := v_1_0
			i := v_1_1
			if i.Op != OpConst8 {
				continue
			}
			t := i.Type
			if !(z.Op != OpConst8 && x.Op != OpConst8) {
				continue
			}
			v.reset(OpSub8)
			v0 := b.NewValue0(v.Pos, OpSub8, t)
			v0.AddArg2(x, z)
			v.AddArg2(v0, i)
			return true
		}
		break
	}
	// match: (Sub8 (Sub8 i:(Const8 <t>) z) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Sub8 i (Add8 <t> z x))
	for {
		if v_0.Op != OpSub8 {
			break
		}
		z := v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		x := v_1
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpSub8)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg2(z, x)
		v.AddArg2(i, v0)
		return true
	}
	// match: (Sub8 (Add8 z i:(Const8 <t>)) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Add8 i (Sub8 <t> z x))
	for {
		if v_0.Op != OpAdd8 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			z := v_0_0
			i := v_0_1
			if i.Op != OpConst8 {
				continue
			}
			t := i.Type
			x := v_1
			if !(z.Op != OpConst8 && x.Op != OpConst8) {
				continue
			}
			v.reset(OpAdd8)
			v0 := b.NewValue0(v.Pos, OpSub8, t)
			v0.AddArg2(z, x)
			v.AddArg2(i, v0)
			return true
		}
		break
	}
	// match: (Sub8 (Const8 <t> [c]) (Sub8 (Const8 <t> [d]) x))
	// result: (Add8 (Const8 <t> [c-d]) x)
	for {
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		c := auxIntToInt8(v_0.AuxInt)
		if v_1.Op != OpSub8 {
			break
		}
		x := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst8 || v_1_0.Type != t {
			break
		}
		d := auxIntToInt8(v_1_0.AuxInt)
		v.reset(OpAdd8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int8ToAuxInt(c - d)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Sub8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
	// result: (Sub8 (Const8 <t> [c-d]) x)
	for {
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		c := auxIntToInt8(v_0.AuxInt)
		if v_1.Op != OpAdd8 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
			if v_1_0.Op != OpConst8 || v_1_0.Type != t {
				continue
			}
			d := auxIntToInt8(v_1_0.AuxInt)
			x := v_1_1
			v.reset(OpSub8)
			v0 := b.NewValue0(v.Pos, OpConst8, t)
			v0.AuxInt = int8ToAuxInt(c - d)
			v.AddArg2(v0, x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpTrunc rewrites v, an OpTrunc (float truncation
// toward zero) value, by folding a constant operand at compile time.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpTrunc(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Trunc (Const64F [c]))
	// result: (Const64F [math.Trunc(c)])
	for {
		if v_0.Op != OpConst64F {
			break
		}
		c := auxIntToFloat64(v_0.AuxInt)
		v.reset(OpConst64F)
		v.AuxInt = float64ToAuxInt(math.Trunc(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpTrunc16to8 rewrites v, an OpTrunc16to8 value:
// it folds constant operands, elides truncation of a matching zero/sign
// extension, and drops an And16 mask that already preserves the low 8
// bits. It reports whether v was rewritten.
func rewriteValuegeneric_OpTrunc16to8(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Trunc16to8 (Const16 [c]))
	// result: (Const8 [int8(c)])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(int8(c))
		return true
	}
	// match: (Trunc16to8 (ZeroExt8to16 x))
	// result: x
	for {
		if v_0.Op != OpZeroExt8to16 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Trunc16to8 (SignExt8to16 x))
	// result: x
	for {
		if v_0.Op != OpSignExt8to16 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Trunc16to8 (And16 (Const16 [y]) x))
	// cond: y&0xFF == 0xFF
	// result: (Trunc16to8 x)
	for {
		if v_0.Op != OpAnd16 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// Try both argument orders of the commutative And16.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst16 {
				continue
			}
			y := auxIntToInt16(v_0_0.AuxInt)
			x := v_0_1
			// The mask keeps every bit that survives the truncation,
			// so the And16 is redundant here.
			if !(y&0xFF == 0xFF) {
				continue
			}
			v.reset(OpTrunc16to8)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpTrunc32to16 rewrites v, an OpTrunc32to16 value:
// it folds constant operands, cancels or narrows zero/sign extensions,
// and drops an And32 mask that already preserves the low 16 bits.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpTrunc32to16(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Trunc32to16 (Const32 [c]))
	// result: (Const16 [int16(c)])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(int16(c))
		return true
	}
	// match: (Trunc32to16 (ZeroExt8to32 x))
	// result: (ZeroExt8to16 x)
	for {
		if v_0.Op != OpZeroExt8to32 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpZeroExt8to16)
		v.AddArg(x)
		return true
	}
	// match: (Trunc32to16 (ZeroExt16to32 x))
	// result: x
	for {
		if v_0.Op != OpZeroExt16to32 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Trunc32to16 (SignExt8to32 x))
	// result: (SignExt8to16 x)
	for {
		if v_0.Op != OpSignExt8to32 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpSignExt8to16)
		v.AddArg(x)
		return true
	}
	// match: (Trunc32to16 (SignExt16to32 x))
	// result: x
	for {
		if v_0.Op != OpSignExt16to32 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Trunc32to16 (And32 (Const32 [y]) x))
	// cond: y&0xFFFF == 0xFFFF
	// result: (Trunc32to16 x)
	for {
		if v_0.Op != OpAnd32 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// Try both argument orders of the commutative And32.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst32 {
				continue
			}
			y := auxIntToInt32(v_0_0.AuxInt)
			x := v_0_1
			// The mask keeps every bit that survives the truncation,
			// so the And32 is redundant here.
			if !(y&0xFFFF == 0xFFFF) {
				continue
			}
			v.reset(OpTrunc32to16)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpTrunc32to8 rewrites v, an OpTrunc32to8 value:
// it folds constant operands, elides truncation of a matching zero/sign
// extension, and drops an And32 mask that already preserves the low 8
// bits. It reports whether v was rewritten.
func rewriteValuegeneric_OpTrunc32to8(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Trunc32to8 (Const32 [c]))
	// result: (Const8 [int8(c)])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(int8(c))
		return true
	}
	// match: (Trunc32to8 (ZeroExt8to32 x))
	// result: x
	for {
		if v_0.Op != OpZeroExt8to32 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Trunc32to8 (SignExt8to32 x))
	// result: x
	for {
		if v_0.Op != OpSignExt8to32 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Trunc32to8 (And32 (Const32 [y]) x))
	// cond: y&0xFF == 0xFF
	// result: (Trunc32to8 x)
	for {
		if v_0.Op != OpAnd32 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// Try both argument orders of the commutative And32.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst32 {
				continue
			}
			y := auxIntToInt32(v_0_0.AuxInt)
			x := v_0_1
			// The mask keeps every bit that survives the truncation,
			// so the And32 is redundant here.
			if !(y&0xFF == 0xFF) {
				continue
			}
			v.reset(OpTrunc32to8)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpTrunc64to16 rewrites v, an OpTrunc64to16 value:
// it folds constant operands, cancels or narrows zero/sign extensions,
// and drops an And64 mask that already preserves the low 16 bits.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpTrunc64to16(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Trunc64to16 (Const64 [c]))
	// result: (Const16 [int16(c)])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(int16(c))
		return true
	}
	// match: (Trunc64to16 (ZeroExt8to64 x))
	// result: (ZeroExt8to16 x)
	for {
		if v_0.Op != OpZeroExt8to64 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpZeroExt8to16)
		v.AddArg(x)
		return true
	}
	// match: (Trunc64to16 (ZeroExt16to64 x))
	// result: x
	for {
		if v_0.Op != OpZeroExt16to64 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Trunc64to16 (SignExt8to64 x))
	// result: (SignExt8to16 x)
	for {
		if v_0.Op != OpSignExt8to64 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpSignExt8to16)
		v.AddArg(x)
		return true
	}
	// match: (Trunc64to16 (SignExt16to64 x))
	// result: x
	for {
		if v_0.Op != OpSignExt16to64 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Trunc64to16 (And64 (Const64 [y]) x))
	// cond: y&0xFFFF == 0xFFFF
	// result: (Trunc64to16 x)
	for {
		if v_0.Op != OpAnd64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// Try both argument orders of the commutative And64.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst64 {
				continue
			}
			y := auxIntToInt64(v_0_0.AuxInt)
			x := v_0_1
			// The mask keeps every bit that survives the truncation,
			// so the And64 is redundant here.
			if !(y&0xFFFF == 0xFFFF) {
				continue
			}
			v.reset(OpTrunc64to16)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpTrunc64to32 rewrites v, an OpTrunc64to32 value:
// it folds constant operands, cancels or narrows zero/sign extensions,
// and drops an And64 mask that already preserves the low 32 bits.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpTrunc64to32(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Trunc64to32 (Const64 [c]))
	// result: (Const32 [int32(c)])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(c))
		return true
	}
	// match: (Trunc64to32 (ZeroExt8to64 x))
	// result: (ZeroExt8to32 x)
	for {
		if v_0.Op != OpZeroExt8to64 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpZeroExt8to32)
		v.AddArg(x)
		return true
	}
	// match: (Trunc64to32 (ZeroExt16to64 x))
	// result: (ZeroExt16to32 x)
	for {
		if v_0.Op != OpZeroExt16to64 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpZeroExt16to32)
		v.AddArg(x)
		return true
	}
	// match: (Trunc64to32 (ZeroExt32to64 x))
	// result: x
	for {
		if v_0.Op != OpZeroExt32to64 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Trunc64to32 (SignExt8to64 x))
	// result: (SignExt8to32 x)
	for {
		if v_0.Op != OpSignExt8to64 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpSignExt8to32)
		v.AddArg(x)
		return true
	}
	// match: (Trunc64to32 (SignExt16to64 x))
	// result: (SignExt16to32 x)
	for {
		if v_0.Op != OpSignExt16to64 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpSignExt16to32)
		v.AddArg(x)
		return true
	}
	// match: (Trunc64to32 (SignExt32to64 x))
	// result: x
	for {
		if v_0.Op != OpSignExt32to64 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Trunc64to32 (And64 (Const64 [y]) x))
	// cond: y&0xFFFFFFFF == 0xFFFFFFFF
	// result: (Trunc64to32 x)
	for {
		if v_0.Op != OpAnd64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// Try both argument orders of the commutative And64.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst64 {
				continue
			}
			y := auxIntToInt64(v_0_0.AuxInt)
			x := v_0_1
			// The mask keeps every bit that survives the truncation,
			// so the And64 is redundant here.
			if !(y&0xFFFFFFFF == 0xFFFFFFFF) {
				continue
			}
			v.reset(OpTrunc64to32)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpTrunc64to8 rewrites v, an OpTrunc64to8 value:
// it folds constant operands, elides truncation of a matching zero/sign
// extension, and drops an And64 mask that already preserves the low 8
// bits. It reports whether v was rewritten.
func rewriteValuegeneric_OpTrunc64to8(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Trunc64to8 (Const64 [c]))
	// result: (Const8 [int8(c)])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(int8(c))
		return true
	}
	// match: (Trunc64to8 (ZeroExt8to64 x))
	// result: x
	for {
		if v_0.Op != OpZeroExt8to64 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Trunc64to8 (SignExt8to64 x))
	// result: x
	for {
		if v_0.Op != OpSignExt8to64 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Trunc64to8 (And64 (Const64 [y]) x))
	// cond: y&0xFF == 0xFF
	// result: (Trunc64to8 x)
	for {
		if v_0.Op != OpAnd64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// Try both argument orders of the commutative And64.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst64 {
				continue
			}
			y := auxIntToInt64(v_0_0.AuxInt)
			x := v_0_1
			// The mask keeps every bit that survives the truncation,
			// so the And64 is redundant here.
			if !(y&0xFF == 0xFF) {
				continue
			}
			v.reset(OpTrunc64to8)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpXor16 applies the generic rewrite rules for Xor16
// values and reports whether v was rewritten. Rules are attempted in source
// order and the first successful match wins (v is rewritten in place and the
// function returns true immediately). The rule set covers: constant folding,
// algebraic identities (x^x=0, 0^x=x, x^~x=-1, -1^x=~x, x^(x^y)=y),
// reassociation to group constants, and recognition of left/right shift
// pairs that together form a 16-bit rotate (RotateLeft16).
func rewriteValuegeneric_OpXor16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Xor16 (Const16 [c]) (Const16 [d]))
	// result: (Const16 [c^d])
	for {
		// Xor16 is commutative: the _i0 loop tries both operand orders;
		// the same pattern recurs in every rule below.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpConst16 {
				continue
			}
			d := auxIntToInt16(v_1.AuxInt)
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(c ^ d)
			return true
		}
		break
	}
	// match: (Xor16 x x)
	// result: (Const16 [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(0)
		return true
	}
	// match: (Xor16 (Const16 [0]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Xor16 (Com16 x) x)
	// result: (Const16 [-1])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom16 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(-1)
			return true
		}
		break
	}
	// match: (Xor16 (Const16 [-1]) x)
	// result: (Com16 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.reset(OpCom16)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Xor16 x (Xor16 x y))
	// result: y
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpXor16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.copyOf(y)
				return true
			}
		}
		break
	}
	// match: (Xor16 (Xor16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Xor16 i (Xor16 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpXor16 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst16 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				// The cond guards against infinite re-application: only
				// hoist the constant when neither other operand is one.
				if !(z.Op != OpConst16 && x.Op != OpConst16) {
					continue
				}
				v.reset(OpXor16)
				v0 := b.NewValue0(v.Pos, OpXor16, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Xor16 (Const16 <t> [c]) (Xor16 (Const16 <t> [d]) x))
	// result: (Xor16 (Const16 <t> [c^d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpXor16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt16(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpXor16)
				v0 := b.NewValue0(v.Pos, OpConst16, t)
				v0.AuxInt = int16ToAuxInt(c ^ d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Xor16 (Lsh16x64 x z:(Const64 <t> [c])) (Rsh16Ux64 x (Const64 [d])))
	// cond: c < 16 && d == 16-c && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLsh16x64 {
				continue
			}
			_ = v_0.Args[1]
			x := v_0.Args[0]
			z := v_0.Args[1]
			if z.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(z.AuxInt)
			if v_1.Op != OpRsh16Ux64 {
				continue
			}
			_ = v_1.Args[1]
			if x != v_1.Args[0] {
				continue
			}
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1_1.AuxInt)
			if !(c < 16 && d == 16-c && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Xor16 left:(Lsh16x64 x y) right:(Rsh16Ux64 x (Sub64 (Const64 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh16x64 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh16Ux64 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub64 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Xor16 left:(Lsh16x32 x y) right:(Rsh16Ux32 x (Sub32 (Const32 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh16x32 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh16Ux32 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub32 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Xor16 left:(Lsh16x16 x y) right:(Rsh16Ux16 x (Sub16 (Const16 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh16x16 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh16Ux16 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub16 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Xor16 left:(Lsh16x8 x y) right:(Rsh16Ux8 x (Sub8 (Const8 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh16x8 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh16Ux8 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub8 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Xor16 right:(Rsh16Ux64 x y) left:(Lsh16x64 x z:(Sub64 (Const64 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh16Ux64 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh16x64 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub64 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Xor16 right:(Rsh16Ux32 x y) left:(Lsh16x32 x z:(Sub32 (Const32 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh16Ux32 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh16x32 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub32 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Xor16 right:(Rsh16Ux16 x y) left:(Lsh16x16 x z:(Sub16 (Const16 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh16Ux16 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh16x16 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub16 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Xor16 right:(Rsh16Ux8 x y) left:(Lsh16x8 x z:(Sub8 (Const8 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh16Ux8 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh16x8 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub8 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpXor32 applies the generic rewrite rules for Xor32
// values and reports whether v was rewritten. Rules are attempted in source
// order and the first successful match wins (v is rewritten in place and the
// function returns true immediately). The rule set mirrors the Xor16 rules
// at 32-bit width: constant folding, algebraic identities (x^x=0, 0^x=x,
// x^~x=-1, -1^x=~x, x^(x^y)=y), reassociation to group constants, and
// recognition of left/right shift pairs that form a 32-bit rotate
// (RotateLeft32).
func rewriteValuegeneric_OpXor32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Xor32 (Const32 [c]) (Const32 [d]))
	// result: (Const32 [c^d])
	for {
		// Xor32 is commutative: the _i0 loop tries both operand orders;
		// the same pattern recurs in every rule below.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpConst32 {
				continue
			}
			d := auxIntToInt32(v_1.AuxInt)
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(c ^ d)
			return true
		}
		break
	}
	// match: (Xor32 x x)
	// result: (Const32 [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(0)
		return true
	}
	// match: (Xor32 (Const32 [0]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Xor32 (Com32 x) x)
	// result: (Const32 [-1])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom32 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(-1)
			return true
		}
		break
	}
	// match: (Xor32 (Const32 [-1]) x)
	// result: (Com32 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.reset(OpCom32)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Xor32 x (Xor32 x y))
	// result: y
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpXor32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.copyOf(y)
				return true
			}
		}
		break
	}
	// match: (Xor32 (Xor32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Xor32 i (Xor32 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpXor32 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst32 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				// The cond guards against infinite re-application: only
				// hoist the constant when neither other operand is one.
				if !(z.Op != OpConst32 && x.Op != OpConst32) {
					continue
				}
				v.reset(OpXor32)
				v0 := b.NewValue0(v.Pos, OpXor32, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Xor32 (Const32 <t> [c]) (Xor32 (Const32 <t> [d]) x))
	// result: (Xor32 (Const32 <t> [c^d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpXor32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt32(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpXor32)
				v0 := b.NewValue0(v.Pos, OpConst32, t)
				v0.AuxInt = int32ToAuxInt(c ^ d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Xor32 (Lsh32x64 x z:(Const64 <t> [c])) (Rsh32Ux64 x (Const64 [d])))
	// cond: c < 32 && d == 32-c && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLsh32x64 {
				continue
			}
			_ = v_0.Args[1]
			x := v_0.Args[0]
			z := v_0.Args[1]
			if z.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(z.AuxInt)
			if v_1.Op != OpRsh32Ux64 {
				continue
			}
			_ = v_1.Args[1]
			if x != v_1.Args[0] {
				continue
			}
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1_1.AuxInt)
			if !(c < 32 && d == 32-c && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Xor32 left:(Lsh32x64 x y) right:(Rsh32Ux64 x (Sub64 (Const64 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh32x64 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh32Ux64 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub64 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Xor32 left:(Lsh32x32 x y) right:(Rsh32Ux32 x (Sub32 (Const32 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh32x32 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh32Ux32 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub32 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Xor32 left:(Lsh32x16 x y) right:(Rsh32Ux16 x (Sub16 (Const16 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh32x16 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh32Ux16 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub16 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Xor32 left:(Lsh32x8 x y) right:(Rsh32Ux8 x (Sub8 (Const8 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh32x8 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh32Ux8 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub8 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Xor32 right:(Rsh32Ux64 x y) left:(Lsh32x64 x z:(Sub64 (Const64 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh32Ux64 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh32x64 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub64 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Xor32 right:(Rsh32Ux32 x y) left:(Lsh32x32 x z:(Sub32 (Const32 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh32Ux32 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh32x32 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub32 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Xor32 right:(Rsh32Ux16 x y) left:(Lsh32x16 x z:(Sub16 (Const16 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh32Ux16 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh32x16 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub16 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Xor32 right:(Rsh32Ux8 x y) left:(Lsh32x8 x z:(Sub8 (Const8 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh32Ux8 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh32x8 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub8 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	return false
}
33339func rewriteValuegeneric_OpXor64(v *Value) bool {
33340	v_1 := v.Args[1]
33341	v_0 := v.Args[0]
33342	b := v.Block
33343	config := b.Func.Config
33344	// match: (Xor64 (Const64 [c]) (Const64 [d]))
33345	// result: (Const64 [c^d])
33346	for {
33347		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33348			if v_0.Op != OpConst64 {
33349				continue
33350			}
33351			c := auxIntToInt64(v_0.AuxInt)
33352			if v_1.Op != OpConst64 {
33353				continue
33354			}
33355			d := auxIntToInt64(v_1.AuxInt)
33356			v.reset(OpConst64)
33357			v.AuxInt = int64ToAuxInt(c ^ d)
33358			return true
33359		}
33360		break
33361	}
33362	// match: (Xor64 x x)
33363	// result: (Const64 [0])
33364	for {
33365		x := v_0
33366		if x != v_1 {
33367			break
33368		}
33369		v.reset(OpConst64)
33370		v.AuxInt = int64ToAuxInt(0)
33371		return true
33372	}
33373	// match: (Xor64 (Const64 [0]) x)
33374	// result: x
33375	for {
33376		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33377			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
33378				continue
33379			}
33380			x := v_1
33381			v.copyOf(x)
33382			return true
33383		}
33384		break
33385	}
33386	// match: (Xor64 (Com64 x) x)
33387	// result: (Const64 [-1])
33388	for {
33389		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33390			if v_0.Op != OpCom64 {
33391				continue
33392			}
33393			x := v_0.Args[0]
33394			if x != v_1 {
33395				continue
33396			}
33397			v.reset(OpConst64)
33398			v.AuxInt = int64ToAuxInt(-1)
33399			return true
33400		}
33401		break
33402	}
33403	// match: (Xor64 (Const64 [-1]) x)
33404	// result: (Com64 x)
33405	for {
33406		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33407			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
33408				continue
33409			}
33410			x := v_1
33411			v.reset(OpCom64)
33412			v.AddArg(x)
33413			return true
33414		}
33415		break
33416	}
33417	// match: (Xor64 x (Xor64 x y))
33418	// result: y
33419	for {
33420		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33421			x := v_0
33422			if v_1.Op != OpXor64 {
33423				continue
33424			}
33425			_ = v_1.Args[1]
33426			v_1_0 := v_1.Args[0]
33427			v_1_1 := v_1.Args[1]
33428			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
33429				if x != v_1_0 {
33430					continue
33431				}
33432				y := v_1_1
33433				v.copyOf(y)
33434				return true
33435			}
33436		}
33437		break
33438	}
33439	// match: (Xor64 (Xor64 i:(Const64 <t>) z) x)
33440	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
33441	// result: (Xor64 i (Xor64 <t> z x))
33442	for {
33443		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33444			if v_0.Op != OpXor64 {
33445				continue
33446			}
33447			_ = v_0.Args[1]
33448			v_0_0 := v_0.Args[0]
33449			v_0_1 := v_0.Args[1]
33450			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
33451				i := v_0_0
33452				if i.Op != OpConst64 {
33453					continue
33454				}
33455				t := i.Type
33456				z := v_0_1
33457				x := v_1
33458				if !(z.Op != OpConst64 && x.Op != OpConst64) {
33459					continue
33460				}
33461				v.reset(OpXor64)
33462				v0 := b.NewValue0(v.Pos, OpXor64, t)
33463				v0.AddArg2(z, x)
33464				v.AddArg2(i, v0)
33465				return true
33466			}
33467		}
33468		break
33469	}
33470	// match: (Xor64 (Const64 <t> [c]) (Xor64 (Const64 <t> [d]) x))
33471	// result: (Xor64 (Const64 <t> [c^d]) x)
33472	for {
33473		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33474			if v_0.Op != OpConst64 {
33475				continue
33476			}
33477			t := v_0.Type
33478			c := auxIntToInt64(v_0.AuxInt)
33479			if v_1.Op != OpXor64 {
33480				continue
33481			}
33482			_ = v_1.Args[1]
33483			v_1_0 := v_1.Args[0]
33484			v_1_1 := v_1.Args[1]
33485			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
33486				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
33487					continue
33488				}
33489				d := auxIntToInt64(v_1_0.AuxInt)
33490				x := v_1_1
33491				v.reset(OpXor64)
33492				v0 := b.NewValue0(v.Pos, OpConst64, t)
33493				v0.AuxInt = int64ToAuxInt(c ^ d)
33494				v.AddArg2(v0, x)
33495				return true
33496			}
33497		}
33498		break
33499	}
33500	// match: (Xor64 (Lsh64x64 x z:(Const64 <t> [c])) (Rsh64Ux64 x (Const64 [d])))
33501	// cond: c < 64 && d == 64-c && canRotate(config, 64)
33502	// result: (RotateLeft64 x z)
33503	for {
33504		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33505			if v_0.Op != OpLsh64x64 {
33506				continue
33507			}
33508			_ = v_0.Args[1]
33509			x := v_0.Args[0]
33510			z := v_0.Args[1]
33511			if z.Op != OpConst64 {
33512				continue
33513			}
33514			c := auxIntToInt64(z.AuxInt)
33515			if v_1.Op != OpRsh64Ux64 {
33516				continue
33517			}
33518			_ = v_1.Args[1]
33519			if x != v_1.Args[0] {
33520				continue
33521			}
33522			v_1_1 := v_1.Args[1]
33523			if v_1_1.Op != OpConst64 {
33524				continue
33525			}
33526			d := auxIntToInt64(v_1_1.AuxInt)
33527			if !(c < 64 && d == 64-c && canRotate(config, 64)) {
33528				continue
33529			}
33530			v.reset(OpRotateLeft64)
33531			v.AddArg2(x, z)
33532			return true
33533		}
33534		break
33535	}
33536	// match: (Xor64 left:(Lsh64x64 x y) right:(Rsh64Ux64 x (Sub64 (Const64 [64]) y)))
33537	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
33538	// result: (RotateLeft64 x y)
33539	for {
33540		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33541			left := v_0
33542			if left.Op != OpLsh64x64 {
33543				continue
33544			}
33545			y := left.Args[1]
33546			x := left.Args[0]
33547			right := v_1
33548			if right.Op != OpRsh64Ux64 {
33549				continue
33550			}
33551			_ = right.Args[1]
33552			if x != right.Args[0] {
33553				continue
33554			}
33555			right_1 := right.Args[1]
33556			if right_1.Op != OpSub64 {
33557				continue
33558			}
33559			_ = right_1.Args[1]
33560			right_1_0 := right_1.Args[0]
33561			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
33562				continue
33563			}
33564			v.reset(OpRotateLeft64)
33565			v.AddArg2(x, y)
33566			return true
33567		}
33568		break
33569	}
33570	// match: (Xor64 left:(Lsh64x32 x y) right:(Rsh64Ux32 x (Sub32 (Const32 [64]) y)))
33571	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
33572	// result: (RotateLeft64 x y)
33573	for {
33574		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33575			left := v_0
33576			if left.Op != OpLsh64x32 {
33577				continue
33578			}
33579			y := left.Args[1]
33580			x := left.Args[0]
33581			right := v_1
33582			if right.Op != OpRsh64Ux32 {
33583				continue
33584			}
33585			_ = right.Args[1]
33586			if x != right.Args[0] {
33587				continue
33588			}
33589			right_1 := right.Args[1]
33590			if right_1.Op != OpSub32 {
33591				continue
33592			}
33593			_ = right_1.Args[1]
33594			right_1_0 := right_1.Args[0]
33595			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
33596				continue
33597			}
33598			v.reset(OpRotateLeft64)
33599			v.AddArg2(x, y)
33600			return true
33601		}
33602		break
33603	}
33604	// match: (Xor64 left:(Lsh64x16 x y) right:(Rsh64Ux16 x (Sub16 (Const16 [64]) y)))
33605	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
33606	// result: (RotateLeft64 x y)
33607	for {
33608		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33609			left := v_0
33610			if left.Op != OpLsh64x16 {
33611				continue
33612			}
33613			y := left.Args[1]
33614			x := left.Args[0]
33615			right := v_1
33616			if right.Op != OpRsh64Ux16 {
33617				continue
33618			}
33619			_ = right.Args[1]
33620			if x != right.Args[0] {
33621				continue
33622			}
33623			right_1 := right.Args[1]
33624			if right_1.Op != OpSub16 {
33625				continue
33626			}
33627			_ = right_1.Args[1]
33628			right_1_0 := right_1.Args[0]
33629			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
33630				continue
33631			}
33632			v.reset(OpRotateLeft64)
33633			v.AddArg2(x, y)
33634			return true
33635		}
33636		break
33637	}
33638	// match: (Xor64 left:(Lsh64x8 x y) right:(Rsh64Ux8 x (Sub8 (Const8 [64]) y)))
33639	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
33640	// result: (RotateLeft64 x y)
33641	for {
33642		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33643			left := v_0
33644			if left.Op != OpLsh64x8 {
33645				continue
33646			}
33647			y := left.Args[1]
33648			x := left.Args[0]
33649			right := v_1
33650			if right.Op != OpRsh64Ux8 {
33651				continue
33652			}
33653			_ = right.Args[1]
33654			if x != right.Args[0] {
33655				continue
33656			}
33657			right_1 := right.Args[1]
33658			if right_1.Op != OpSub8 {
33659				continue
33660			}
33661			_ = right_1.Args[1]
33662			right_1_0 := right_1.Args[0]
33663			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
33664				continue
33665			}
33666			v.reset(OpRotateLeft64)
33667			v.AddArg2(x, y)
33668			return true
33669		}
33670		break
33671	}
33672	// match: (Xor64 right:(Rsh64Ux64 x y) left:(Lsh64x64 x z:(Sub64 (Const64 [64]) y)))
33673	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
33674	// result: (RotateLeft64 x z)
33675	for {
33676		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33677			right := v_0
33678			if right.Op != OpRsh64Ux64 {
33679				continue
33680			}
33681			y := right.Args[1]
33682			x := right.Args[0]
33683			left := v_1
33684			if left.Op != OpLsh64x64 {
33685				continue
33686			}
33687			_ = left.Args[1]
33688			if x != left.Args[0] {
33689				continue
33690			}
33691			z := left.Args[1]
33692			if z.Op != OpSub64 {
33693				continue
33694			}
33695			_ = z.Args[1]
33696			z_0 := z.Args[0]
33697			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
33698				continue
33699			}
33700			v.reset(OpRotateLeft64)
33701			v.AddArg2(x, z)
33702			return true
33703		}
33704		break
33705	}
33706	// match: (Xor64 right:(Rsh64Ux32 x y) left:(Lsh64x32 x z:(Sub32 (Const32 [64]) y)))
33707	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
33708	// result: (RotateLeft64 x z)
33709	for {
33710		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33711			right := v_0
33712			if right.Op != OpRsh64Ux32 {
33713				continue
33714			}
33715			y := right.Args[1]
33716			x := right.Args[0]
33717			left := v_1
33718			if left.Op != OpLsh64x32 {
33719				continue
33720			}
33721			_ = left.Args[1]
33722			if x != left.Args[0] {
33723				continue
33724			}
33725			z := left.Args[1]
33726			if z.Op != OpSub32 {
33727				continue
33728			}
33729			_ = z.Args[1]
33730			z_0 := z.Args[0]
33731			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
33732				continue
33733			}
33734			v.reset(OpRotateLeft64)
33735			v.AddArg2(x, z)
33736			return true
33737		}
33738		break
33739	}
33740	// match: (Xor64 right:(Rsh64Ux16 x y) left:(Lsh64x16 x z:(Sub16 (Const16 [64]) y)))
33741	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
33742	// result: (RotateLeft64 x z)
33743	for {
33744		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33745			right := v_0
33746			if right.Op != OpRsh64Ux16 {
33747				continue
33748			}
33749			y := right.Args[1]
33750			x := right.Args[0]
33751			left := v_1
33752			if left.Op != OpLsh64x16 {
33753				continue
33754			}
33755			_ = left.Args[1]
33756			if x != left.Args[0] {
33757				continue
33758			}
33759			z := left.Args[1]
33760			if z.Op != OpSub16 {
33761				continue
33762			}
33763			_ = z.Args[1]
33764			z_0 := z.Args[0]
33765			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
33766				continue
33767			}
33768			v.reset(OpRotateLeft64)
33769			v.AddArg2(x, z)
33770			return true
33771		}
33772		break
33773	}
33774	// match: (Xor64 right:(Rsh64Ux8 x y) left:(Lsh64x8 x z:(Sub8 (Const8 [64]) y)))
33775	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
33776	// result: (RotateLeft64 x z)
33777	for {
33778		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33779			right := v_0
33780			if right.Op != OpRsh64Ux8 {
33781				continue
33782			}
33783			y := right.Args[1]
33784			x := right.Args[0]
33785			left := v_1
33786			if left.Op != OpLsh64x8 {
33787				continue
33788			}
33789			_ = left.Args[1]
33790			if x != left.Args[0] {
33791				continue
33792			}
33793			z := left.Args[1]
33794			if z.Op != OpSub8 {
33795				continue
33796			}
33797			_ = z.Args[1]
33798			z_0 := z.Args[0]
33799			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
33800				continue
33801			}
33802			v.reset(OpRotateLeft64)
33803			v.AddArg2(x, z)
33804			return true
33805		}
33806		break
33807	}
33808	return false
33809}
// rewriteValuegeneric_OpXor8 rewrites an OpXor8 value v according to the
// generic rules and reports whether v was changed. Each candidate rule is
// described by the match/cond/result comments that precede it. This code is
// generated from _gen/generic.rules; do not edit by hand.
func rewriteValuegeneric_OpXor8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Xor8 (Const8 [c]) (Const8 [d]))
	// result: (Const8 [c^d])
	// Each rule body is wrapped in a one-shot for{...break} so a failed
	// match can break out and fall through to the next rule.
	for {
		// Xor8 is commutative: the _i0 loop tries both operand orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpConst8 {
				continue
			}
			d := auxIntToInt8(v_1.AuxInt)
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(c ^ d)
			return true
		}
		break
	}
	// match: (Xor8 x x)
	// result: (Const8 [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(0)
		return true
	}
	// match: (Xor8 (Const8 [0]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Xor8 (Com8 x) x)
	// result: (Const8 [-1])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom8 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(-1)
			return true
		}
		break
	}
	// match: (Xor8 (Const8 [-1]) x)
	// result: (Com8 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.reset(OpCom8)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Xor8 x (Xor8 x y))
	// result: y
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpXor8 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			// The inner Xor8 is also commutative; try both of its operand orders.
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.copyOf(y)
				return true
			}
		}
		break
	}
	// match: (Xor8 (Xor8 i:(Const8 <t>) z) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Xor8 i (Xor8 <t> z x))
	// Reassociation: float the constant toward the root so the
	// constant-merging rule below can combine it with other constants.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpXor8 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst8 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst8 && x.Op != OpConst8) {
					continue
				}
				v.reset(OpXor8)
				v0 := b.NewValue0(v.Pos, OpXor8, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Xor8 (Const8 <t> [c]) (Xor8 (Const8 <t> [d]) x))
	// result: (Xor8 (Const8 <t> [c^d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpXor8 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt8(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpXor8)
				v0 := b.NewValue0(v.Pos, OpConst8, t)
				v0.AuxInt = int8ToAuxInt(c ^ d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Xor8 (Lsh8x64 x z:(Const64 <t> [c])) (Rsh8Ux64 x (Const64 [d])))
	// cond: c < 8 && d == 8-c && canRotate(config, 8)
	// result: (RotateLeft8 x z)
	// The remaining rules recognize shift pairs that form an 8-bit rotate.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLsh8x64 {
				continue
			}
			_ = v_0.Args[1]
			x := v_0.Args[0]
			z := v_0.Args[1]
			if z.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(z.AuxInt)
			if v_1.Op != OpRsh8Ux64 {
				continue
			}
			_ = v_1.Args[1]
			if x != v_1.Args[0] {
				continue
			}
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1_1.AuxInt)
			if !(c < 8 && d == 8-c && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Xor8 left:(Lsh8x64 x y) right:(Rsh8Ux64 x (Sub64 (Const64 [8]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
	// result: (RotateLeft8 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh8x64 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh8Ux64 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub64 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Xor8 left:(Lsh8x32 x y) right:(Rsh8Ux32 x (Sub32 (Const32 [8]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
	// result: (RotateLeft8 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh8x32 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh8Ux32 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub32 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Xor8 left:(Lsh8x16 x y) right:(Rsh8Ux16 x (Sub16 (Const16 [8]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
	// result: (RotateLeft8 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh8x16 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh8Ux16 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub16 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Xor8 left:(Lsh8x8 x y) right:(Rsh8Ux8 x (Sub8 (Const8 [8]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
	// result: (RotateLeft8 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh8x8 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh8Ux8 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub8 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Xor8 right:(Rsh8Ux64 x y) left:(Lsh8x64 x z:(Sub64 (Const64 [8]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
	// result: (RotateLeft8 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh8Ux64 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh8x64 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub64 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Xor8 right:(Rsh8Ux32 x y) left:(Lsh8x32 x z:(Sub32 (Const32 [8]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
	// result: (RotateLeft8 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh8Ux32 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh8x32 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub32 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Xor8 right:(Rsh8Ux16 x y) left:(Lsh8x16 x z:(Sub16 (Const16 [8]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
	// result: (RotateLeft8 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh8Ux16 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh8x16 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub16 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Xor8 right:(Rsh8Ux8 x y) left:(Lsh8x8 x z:(Sub8 (Const8 [8]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
	// result: (RotateLeft8 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh8Ux8 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh8x8 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub8 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
				continue
			}
			v.reset(OpRotateLeft8)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpZero rewrites an OpZero value v (zeroing of a memory
// region) according to the generic rules and reports whether v was changed.
// The rules eliminate Zeros that are redundant with an adjacent store, Move,
// VarDef, or earlier Zero of the same pointer. Generated code; do not edit.
func rewriteValuegeneric_OpZero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Zero (SelectN [0] call:(StaticLECall _ _)) mem:(SelectN [1] call))
	// cond: isSameCall(call.Aux, "runtime.newobject")
	// result: mem
	// newobject already returns zeroed memory, so the Zero is redundant.
	for {
		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		call := v_0.Args[0]
		if call.Op != OpStaticLECall || len(call.Args) != 2 {
			break
		}
		mem := v_1
		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isSameCall(call.Aux, "runtime.newobject")) {
			break
		}
		v.copyOf(mem)
		return true
	}
	// match: (Zero {t1} [n] p1 store:(Store {t2} (OffPtr [o2] p2) _ mem))
	// cond: isSamePtr(p1, p2) && store.Uses == 1 && n >= o2 + t2.Size() && clobber(store)
	// result: (Zero {t1} [n] p1 mem)
	// A store fully covered by the following Zero is dead; drop it.
	for {
		n := auxIntToInt64(v.AuxInt)
		t1 := auxToType(v.Aux)
		p1 := v_0
		store := v_1
		if store.Op != OpStore {
			break
		}
		t2 := auxToType(store.Aux)
		mem := store.Args[2]
		store_0 := store.Args[0]
		if store_0.Op != OpOffPtr {
			break
		}
		o2 := auxIntToInt64(store_0.AuxInt)
		p2 := store_0.Args[0]
		if !(isSamePtr(p1, p2) && store.Uses == 1 && n >= o2+t2.Size() && clobber(store)) {
			break
		}
		v.reset(OpZero)
		v.AuxInt = int64ToAuxInt(n)
		v.Aux = typeToAux(t1)
		v.AddArg2(p1, mem)
		return true
	}
	// match: (Zero {t} [n] dst1 move:(Move {t} [n] dst2 _ mem))
	// cond: move.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move)
	// result: (Zero {t} [n] dst1 mem)
	for {
		n := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst1 := v_0
		move := v_1
		if move.Op != OpMove || auxIntToInt64(move.AuxInt) != n || auxToType(move.Aux) != t {
			break
		}
		mem := move.Args[2]
		dst2 := move.Args[0]
		if !(move.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move)) {
			break
		}
		v.reset(OpZero)
		v.AuxInt = int64ToAuxInt(n)
		v.Aux = typeToAux(t)
		v.AddArg2(dst1, mem)
		return true
	}
	// match: (Zero {t} [n] dst1 vardef:(VarDef {x} move:(Move {t} [n] dst2 _ mem)))
	// cond: move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move, vardef)
	// result: (Zero {t} [n] dst1 (VarDef {x} mem))
	// Same as the previous rule but keeps the VarDef marker in the memory chain.
	for {
		n := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst1 := v_0
		vardef := v_1
		if vardef.Op != OpVarDef {
			break
		}
		x := auxToSym(vardef.Aux)
		move := vardef.Args[0]
		if move.Op != OpMove || auxIntToInt64(move.AuxInt) != n || auxToType(move.Aux) != t {
			break
		}
		mem := move.Args[2]
		dst2 := move.Args[0]
		if !(move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move, vardef)) {
			break
		}
		v.reset(OpZero)
		v.AuxInt = int64ToAuxInt(n)
		v.Aux = typeToAux(t)
		v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
		v0.Aux = symToAux(x)
		v0.AddArg(mem)
		v.AddArg2(dst1, v0)
		return true
	}
	// match: (Zero {t} [s] dst1 zero:(Zero {t} [s] dst2 _))
	// cond: isSamePtr(dst1, dst2)
	// result: zero
	// Zeroing the same region twice in a row is idempotent.
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst1 := v_0
		zero := v_1
		if zero.Op != OpZero || auxIntToInt64(zero.AuxInt) != s || auxToType(zero.Aux) != t {
			break
		}
		dst2 := zero.Args[0]
		if !(isSamePtr(dst1, dst2)) {
			break
		}
		v.copyOf(zero)
		return true
	}
	// match: (Zero {t} [s] dst1 vardef:(VarDef (Zero {t} [s] dst2 _)))
	// cond: isSamePtr(dst1, dst2)
	// result: vardef
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst1 := v_0
		vardef := v_1
		if vardef.Op != OpVarDef {
			break
		}
		vardef_0 := vardef.Args[0]
		if vardef_0.Op != OpZero || auxIntToInt64(vardef_0.AuxInt) != s || auxToType(vardef_0.Aux) != t {
			break
		}
		dst2 := vardef_0.Args[0]
		if !(isSamePtr(dst1, dst2)) {
			break
		}
		v.copyOf(vardef)
		return true
	}
	return false
}
34425func rewriteValuegeneric_OpZeroExt16to32(v *Value) bool {
34426	v_0 := v.Args[0]
34427	// match: (ZeroExt16to32 (Const16 [c]))
34428	// result: (Const32 [int32(uint16(c))])
34429	for {
34430		if v_0.Op != OpConst16 {
34431			break
34432		}
34433		c := auxIntToInt16(v_0.AuxInt)
34434		v.reset(OpConst32)
34435		v.AuxInt = int32ToAuxInt(int32(uint16(c)))
34436		return true
34437	}
34438	// match: (ZeroExt16to32 (Trunc32to16 x:(Rsh32Ux64 _ (Const64 [s]))))
34439	// cond: s >= 16
34440	// result: x
34441	for {
34442		if v_0.Op != OpTrunc32to16 {
34443			break
34444		}
34445		x := v_0.Args[0]
34446		if x.Op != OpRsh32Ux64 {
34447			break
34448		}
34449		_ = x.Args[1]
34450		x_1 := x.Args[1]
34451		if x_1.Op != OpConst64 {
34452			break
34453		}
34454		s := auxIntToInt64(x_1.AuxInt)
34455		if !(s >= 16) {
34456			break
34457		}
34458		v.copyOf(x)
34459		return true
34460	}
34461	return false
34462}
34463func rewriteValuegeneric_OpZeroExt16to64(v *Value) bool {
34464	v_0 := v.Args[0]
34465	// match: (ZeroExt16to64 (Const16 [c]))
34466	// result: (Const64 [int64(uint16(c))])
34467	for {
34468		if v_0.Op != OpConst16 {
34469			break
34470		}
34471		c := auxIntToInt16(v_0.AuxInt)
34472		v.reset(OpConst64)
34473		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
34474		return true
34475	}
34476	// match: (ZeroExt16to64 (Trunc64to16 x:(Rsh64Ux64 _ (Const64 [s]))))
34477	// cond: s >= 48
34478	// result: x
34479	for {
34480		if v_0.Op != OpTrunc64to16 {
34481			break
34482		}
34483		x := v_0.Args[0]
34484		if x.Op != OpRsh64Ux64 {
34485			break
34486		}
34487		_ = x.Args[1]
34488		x_1 := x.Args[1]
34489		if x_1.Op != OpConst64 {
34490			break
34491		}
34492		s := auxIntToInt64(x_1.AuxInt)
34493		if !(s >= 48) {
34494			break
34495		}
34496		v.copyOf(x)
34497		return true
34498	}
34499	return false
34500}
34501func rewriteValuegeneric_OpZeroExt32to64(v *Value) bool {
34502	v_0 := v.Args[0]
34503	// match: (ZeroExt32to64 (Const32 [c]))
34504	// result: (Const64 [int64(uint32(c))])
34505	for {
34506		if v_0.Op != OpConst32 {
34507			break
34508		}
34509		c := auxIntToInt32(v_0.AuxInt)
34510		v.reset(OpConst64)
34511		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
34512		return true
34513	}
34514	// match: (ZeroExt32to64 (Trunc64to32 x:(Rsh64Ux64 _ (Const64 [s]))))
34515	// cond: s >= 32
34516	// result: x
34517	for {
34518		if v_0.Op != OpTrunc64to32 {
34519			break
34520		}
34521		x := v_0.Args[0]
34522		if x.Op != OpRsh64Ux64 {
34523			break
34524		}
34525		_ = x.Args[1]
34526		x_1 := x.Args[1]
34527		if x_1.Op != OpConst64 {
34528			break
34529		}
34530		s := auxIntToInt64(x_1.AuxInt)
34531		if !(s >= 32) {
34532			break
34533		}
34534		v.copyOf(x)
34535		return true
34536	}
34537	return false
34538}
34539func rewriteValuegeneric_OpZeroExt8to16(v *Value) bool {
34540	v_0 := v.Args[0]
34541	// match: (ZeroExt8to16 (Const8 [c]))
34542	// result: (Const16 [int16( uint8(c))])
34543	for {
34544		if v_0.Op != OpConst8 {
34545			break
34546		}
34547		c := auxIntToInt8(v_0.AuxInt)
34548		v.reset(OpConst16)
34549		v.AuxInt = int16ToAuxInt(int16(uint8(c)))
34550		return true
34551	}
34552	// match: (ZeroExt8to16 (Trunc16to8 x:(Rsh16Ux64 _ (Const64 [s]))))
34553	// cond: s >= 8
34554	// result: x
34555	for {
34556		if v_0.Op != OpTrunc16to8 {
34557			break
34558		}
34559		x := v_0.Args[0]
34560		if x.Op != OpRsh16Ux64 {
34561			break
34562		}
34563		_ = x.Args[1]
34564		x_1 := x.Args[1]
34565		if x_1.Op != OpConst64 {
34566			break
34567		}
34568		s := auxIntToInt64(x_1.AuxInt)
34569		if !(s >= 8) {
34570			break
34571		}
34572		v.copyOf(x)
34573		return true
34574	}
34575	return false
34576}
34577func rewriteValuegeneric_OpZeroExt8to32(v *Value) bool {
34578	v_0 := v.Args[0]
34579	// match: (ZeroExt8to32 (Const8 [c]))
34580	// result: (Const32 [int32( uint8(c))])
34581	for {
34582		if v_0.Op != OpConst8 {
34583			break
34584		}
34585		c := auxIntToInt8(v_0.AuxInt)
34586		v.reset(OpConst32)
34587		v.AuxInt = int32ToAuxInt(int32(uint8(c)))
34588		return true
34589	}
34590	// match: (ZeroExt8to32 (Trunc32to8 x:(Rsh32Ux64 _ (Const64 [s]))))
34591	// cond: s >= 24
34592	// result: x
34593	for {
34594		if v_0.Op != OpTrunc32to8 {
34595			break
34596		}
34597		x := v_0.Args[0]
34598		if x.Op != OpRsh32Ux64 {
34599			break
34600		}
34601		_ = x.Args[1]
34602		x_1 := x.Args[1]
34603		if x_1.Op != OpConst64 {
34604			break
34605		}
34606		s := auxIntToInt64(x_1.AuxInt)
34607		if !(s >= 24) {
34608			break
34609		}
34610		v.copyOf(x)
34611		return true
34612	}
34613	return false
34614}
34615func rewriteValuegeneric_OpZeroExt8to64(v *Value) bool {
34616	v_0 := v.Args[0]
34617	// match: (ZeroExt8to64 (Const8 [c]))
34618	// result: (Const64 [int64( uint8(c))])
34619	for {
34620		if v_0.Op != OpConst8 {
34621			break
34622		}
34623		c := auxIntToInt8(v_0.AuxInt)
34624		v.reset(OpConst64)
34625		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
34626		return true
34627	}
34628	// match: (ZeroExt8to64 (Trunc64to8 x:(Rsh64Ux64 _ (Const64 [s]))))
34629	// cond: s >= 56
34630	// result: x
34631	for {
34632		if v_0.Op != OpTrunc64to8 {
34633			break
34634		}
34635		x := v_0.Args[0]
34636		if x.Op != OpRsh64Ux64 {
34637			break
34638		}
34639		_ = x.Args[1]
34640		x_1 := x.Args[1]
34641		if x_1.Op != OpConst64 {
34642			break
34643		}
34644		s := auxIntToInt64(x_1.AuxInt)
34645		if !(s >= 56) {
34646			break
34647		}
34648		v.copyOf(x)
34649		return true
34650	}
34651	return false
34652}
34653func rewriteBlockgeneric(b *Block) bool {
34654	switch b.Kind {
34655	case BlockIf:
34656		// match: (If (Not cond) yes no)
34657		// result: (If cond no yes)
34658		for b.Controls[0].Op == OpNot {
34659			v_0 := b.Controls[0]
34660			cond := v_0.Args[0]
34661			b.resetWithControl(BlockIf, cond)
34662			b.swapSuccessors()
34663			return true
34664		}
34665		// match: (If (ConstBool [c]) yes no)
34666		// cond: c
34667		// result: (First yes no)
34668		for b.Controls[0].Op == OpConstBool {
34669			v_0 := b.Controls[0]
34670			c := auxIntToBool(v_0.AuxInt)
34671			if !(c) {
34672				break
34673			}
34674			b.Reset(BlockFirst)
34675			return true
34676		}
34677		// match: (If (ConstBool [c]) yes no)
34678		// cond: !c
34679		// result: (First no yes)
34680		for b.Controls[0].Op == OpConstBool {
34681			v_0 := b.Controls[0]
34682			c := auxIntToBool(v_0.AuxInt)
34683			if !(!c) {
34684				break
34685			}
34686			b.Reset(BlockFirst)
34687			b.swapSuccessors()
34688			return true
34689		}
34690	}
34691	return false
34692}
34693