1// Code generated from _gen/ARM64.rules using 'go generate'; DO NOT EDIT.
2
3package ssa
4
5import "cmd/compile/internal/types"
6
7func rewriteValueARM64(v *Value) bool {
8	switch v.Op {
9	case OpARM64ADCSflags:
10		return rewriteValueARM64_OpARM64ADCSflags(v)
11	case OpARM64ADD:
12		return rewriteValueARM64_OpARM64ADD(v)
13	case OpARM64ADDSflags:
14		return rewriteValueARM64_OpARM64ADDSflags(v)
15	case OpARM64ADDconst:
16		return rewriteValueARM64_OpARM64ADDconst(v)
17	case OpARM64ADDshiftLL:
18		return rewriteValueARM64_OpARM64ADDshiftLL(v)
19	case OpARM64ADDshiftRA:
20		return rewriteValueARM64_OpARM64ADDshiftRA(v)
21	case OpARM64ADDshiftRL:
22		return rewriteValueARM64_OpARM64ADDshiftRL(v)
23	case OpARM64AND:
24		return rewriteValueARM64_OpARM64AND(v)
25	case OpARM64ANDconst:
26		return rewriteValueARM64_OpARM64ANDconst(v)
27	case OpARM64ANDshiftLL:
28		return rewriteValueARM64_OpARM64ANDshiftLL(v)
29	case OpARM64ANDshiftRA:
30		return rewriteValueARM64_OpARM64ANDshiftRA(v)
31	case OpARM64ANDshiftRL:
32		return rewriteValueARM64_OpARM64ANDshiftRL(v)
33	case OpARM64ANDshiftRO:
34		return rewriteValueARM64_OpARM64ANDshiftRO(v)
35	case OpARM64BIC:
36		return rewriteValueARM64_OpARM64BIC(v)
37	case OpARM64BICshiftLL:
38		return rewriteValueARM64_OpARM64BICshiftLL(v)
39	case OpARM64BICshiftRA:
40		return rewriteValueARM64_OpARM64BICshiftRA(v)
41	case OpARM64BICshiftRL:
42		return rewriteValueARM64_OpARM64BICshiftRL(v)
43	case OpARM64BICshiftRO:
44		return rewriteValueARM64_OpARM64BICshiftRO(v)
45	case OpARM64CMN:
46		return rewriteValueARM64_OpARM64CMN(v)
47	case OpARM64CMNW:
48		return rewriteValueARM64_OpARM64CMNW(v)
49	case OpARM64CMNWconst:
50		return rewriteValueARM64_OpARM64CMNWconst(v)
51	case OpARM64CMNconst:
52		return rewriteValueARM64_OpARM64CMNconst(v)
53	case OpARM64CMNshiftLL:
54		return rewriteValueARM64_OpARM64CMNshiftLL(v)
55	case OpARM64CMNshiftRA:
56		return rewriteValueARM64_OpARM64CMNshiftRA(v)
57	case OpARM64CMNshiftRL:
58		return rewriteValueARM64_OpARM64CMNshiftRL(v)
59	case OpARM64CMP:
60		return rewriteValueARM64_OpARM64CMP(v)
61	case OpARM64CMPW:
62		return rewriteValueARM64_OpARM64CMPW(v)
63	case OpARM64CMPWconst:
64		return rewriteValueARM64_OpARM64CMPWconst(v)
65	case OpARM64CMPconst:
66		return rewriteValueARM64_OpARM64CMPconst(v)
67	case OpARM64CMPshiftLL:
68		return rewriteValueARM64_OpARM64CMPshiftLL(v)
69	case OpARM64CMPshiftRA:
70		return rewriteValueARM64_OpARM64CMPshiftRA(v)
71	case OpARM64CMPshiftRL:
72		return rewriteValueARM64_OpARM64CMPshiftRL(v)
73	case OpARM64CSEL:
74		return rewriteValueARM64_OpARM64CSEL(v)
75	case OpARM64CSEL0:
76		return rewriteValueARM64_OpARM64CSEL0(v)
77	case OpARM64CSETM:
78		return rewriteValueARM64_OpARM64CSETM(v)
79	case OpARM64CSINC:
80		return rewriteValueARM64_OpARM64CSINC(v)
81	case OpARM64CSINV:
82		return rewriteValueARM64_OpARM64CSINV(v)
83	case OpARM64CSNEG:
84		return rewriteValueARM64_OpARM64CSNEG(v)
85	case OpARM64DIV:
86		return rewriteValueARM64_OpARM64DIV(v)
87	case OpARM64DIVW:
88		return rewriteValueARM64_OpARM64DIVW(v)
89	case OpARM64EON:
90		return rewriteValueARM64_OpARM64EON(v)
91	case OpARM64EONshiftLL:
92		return rewriteValueARM64_OpARM64EONshiftLL(v)
93	case OpARM64EONshiftRA:
94		return rewriteValueARM64_OpARM64EONshiftRA(v)
95	case OpARM64EONshiftRL:
96		return rewriteValueARM64_OpARM64EONshiftRL(v)
97	case OpARM64EONshiftRO:
98		return rewriteValueARM64_OpARM64EONshiftRO(v)
99	case OpARM64Equal:
100		return rewriteValueARM64_OpARM64Equal(v)
101	case OpARM64FADDD:
102		return rewriteValueARM64_OpARM64FADDD(v)
103	case OpARM64FADDS:
104		return rewriteValueARM64_OpARM64FADDS(v)
105	case OpARM64FCMPD:
106		return rewriteValueARM64_OpARM64FCMPD(v)
107	case OpARM64FCMPS:
108		return rewriteValueARM64_OpARM64FCMPS(v)
109	case OpARM64FMOVDfpgp:
110		return rewriteValueARM64_OpARM64FMOVDfpgp(v)
111	case OpARM64FMOVDgpfp:
112		return rewriteValueARM64_OpARM64FMOVDgpfp(v)
113	case OpARM64FMOVDload:
114		return rewriteValueARM64_OpARM64FMOVDload(v)
115	case OpARM64FMOVDloadidx:
116		return rewriteValueARM64_OpARM64FMOVDloadidx(v)
117	case OpARM64FMOVDloadidx8:
118		return rewriteValueARM64_OpARM64FMOVDloadidx8(v)
119	case OpARM64FMOVDstore:
120		return rewriteValueARM64_OpARM64FMOVDstore(v)
121	case OpARM64FMOVDstoreidx:
122		return rewriteValueARM64_OpARM64FMOVDstoreidx(v)
123	case OpARM64FMOVDstoreidx8:
124		return rewriteValueARM64_OpARM64FMOVDstoreidx8(v)
125	case OpARM64FMOVSload:
126		return rewriteValueARM64_OpARM64FMOVSload(v)
127	case OpARM64FMOVSloadidx:
128		return rewriteValueARM64_OpARM64FMOVSloadidx(v)
129	case OpARM64FMOVSloadidx4:
130		return rewriteValueARM64_OpARM64FMOVSloadidx4(v)
131	case OpARM64FMOVSstore:
132		return rewriteValueARM64_OpARM64FMOVSstore(v)
133	case OpARM64FMOVSstoreidx:
134		return rewriteValueARM64_OpARM64FMOVSstoreidx(v)
135	case OpARM64FMOVSstoreidx4:
136		return rewriteValueARM64_OpARM64FMOVSstoreidx4(v)
137	case OpARM64FMULD:
138		return rewriteValueARM64_OpARM64FMULD(v)
139	case OpARM64FMULS:
140		return rewriteValueARM64_OpARM64FMULS(v)
141	case OpARM64FNEGD:
142		return rewriteValueARM64_OpARM64FNEGD(v)
143	case OpARM64FNEGS:
144		return rewriteValueARM64_OpARM64FNEGS(v)
145	case OpARM64FNMULD:
146		return rewriteValueARM64_OpARM64FNMULD(v)
147	case OpARM64FNMULS:
148		return rewriteValueARM64_OpARM64FNMULS(v)
149	case OpARM64FSUBD:
150		return rewriteValueARM64_OpARM64FSUBD(v)
151	case OpARM64FSUBS:
152		return rewriteValueARM64_OpARM64FSUBS(v)
153	case OpARM64GreaterEqual:
154		return rewriteValueARM64_OpARM64GreaterEqual(v)
155	case OpARM64GreaterEqualF:
156		return rewriteValueARM64_OpARM64GreaterEqualF(v)
157	case OpARM64GreaterEqualNoov:
158		return rewriteValueARM64_OpARM64GreaterEqualNoov(v)
159	case OpARM64GreaterEqualU:
160		return rewriteValueARM64_OpARM64GreaterEqualU(v)
161	case OpARM64GreaterThan:
162		return rewriteValueARM64_OpARM64GreaterThan(v)
163	case OpARM64GreaterThanF:
164		return rewriteValueARM64_OpARM64GreaterThanF(v)
165	case OpARM64GreaterThanU:
166		return rewriteValueARM64_OpARM64GreaterThanU(v)
167	case OpARM64LDP:
168		return rewriteValueARM64_OpARM64LDP(v)
169	case OpARM64LessEqual:
170		return rewriteValueARM64_OpARM64LessEqual(v)
171	case OpARM64LessEqualF:
172		return rewriteValueARM64_OpARM64LessEqualF(v)
173	case OpARM64LessEqualU:
174		return rewriteValueARM64_OpARM64LessEqualU(v)
175	case OpARM64LessThan:
176		return rewriteValueARM64_OpARM64LessThan(v)
177	case OpARM64LessThanF:
178		return rewriteValueARM64_OpARM64LessThanF(v)
179	case OpARM64LessThanNoov:
180		return rewriteValueARM64_OpARM64LessThanNoov(v)
181	case OpARM64LessThanU:
182		return rewriteValueARM64_OpARM64LessThanU(v)
183	case OpARM64MADD:
184		return rewriteValueARM64_OpARM64MADD(v)
185	case OpARM64MADDW:
186		return rewriteValueARM64_OpARM64MADDW(v)
187	case OpARM64MNEG:
188		return rewriteValueARM64_OpARM64MNEG(v)
189	case OpARM64MNEGW:
190		return rewriteValueARM64_OpARM64MNEGW(v)
191	case OpARM64MOD:
192		return rewriteValueARM64_OpARM64MOD(v)
193	case OpARM64MODW:
194		return rewriteValueARM64_OpARM64MODW(v)
195	case OpARM64MOVBUload:
196		return rewriteValueARM64_OpARM64MOVBUload(v)
197	case OpARM64MOVBUloadidx:
198		return rewriteValueARM64_OpARM64MOVBUloadidx(v)
199	case OpARM64MOVBUreg:
200		return rewriteValueARM64_OpARM64MOVBUreg(v)
201	case OpARM64MOVBload:
202		return rewriteValueARM64_OpARM64MOVBload(v)
203	case OpARM64MOVBloadidx:
204		return rewriteValueARM64_OpARM64MOVBloadidx(v)
205	case OpARM64MOVBreg:
206		return rewriteValueARM64_OpARM64MOVBreg(v)
207	case OpARM64MOVBstore:
208		return rewriteValueARM64_OpARM64MOVBstore(v)
209	case OpARM64MOVBstoreidx:
210		return rewriteValueARM64_OpARM64MOVBstoreidx(v)
211	case OpARM64MOVBstorezero:
212		return rewriteValueARM64_OpARM64MOVBstorezero(v)
213	case OpARM64MOVBstorezeroidx:
214		return rewriteValueARM64_OpARM64MOVBstorezeroidx(v)
215	case OpARM64MOVDload:
216		return rewriteValueARM64_OpARM64MOVDload(v)
217	case OpARM64MOVDloadidx:
218		return rewriteValueARM64_OpARM64MOVDloadidx(v)
219	case OpARM64MOVDloadidx8:
220		return rewriteValueARM64_OpARM64MOVDloadidx8(v)
221	case OpARM64MOVDnop:
222		return rewriteValueARM64_OpARM64MOVDnop(v)
223	case OpARM64MOVDreg:
224		return rewriteValueARM64_OpARM64MOVDreg(v)
225	case OpARM64MOVDstore:
226		return rewriteValueARM64_OpARM64MOVDstore(v)
227	case OpARM64MOVDstoreidx:
228		return rewriteValueARM64_OpARM64MOVDstoreidx(v)
229	case OpARM64MOVDstoreidx8:
230		return rewriteValueARM64_OpARM64MOVDstoreidx8(v)
231	case OpARM64MOVDstorezero:
232		return rewriteValueARM64_OpARM64MOVDstorezero(v)
233	case OpARM64MOVDstorezeroidx:
234		return rewriteValueARM64_OpARM64MOVDstorezeroidx(v)
235	case OpARM64MOVDstorezeroidx8:
236		return rewriteValueARM64_OpARM64MOVDstorezeroidx8(v)
237	case OpARM64MOVHUload:
238		return rewriteValueARM64_OpARM64MOVHUload(v)
239	case OpARM64MOVHUloadidx:
240		return rewriteValueARM64_OpARM64MOVHUloadidx(v)
241	case OpARM64MOVHUloadidx2:
242		return rewriteValueARM64_OpARM64MOVHUloadidx2(v)
243	case OpARM64MOVHUreg:
244		return rewriteValueARM64_OpARM64MOVHUreg(v)
245	case OpARM64MOVHload:
246		return rewriteValueARM64_OpARM64MOVHload(v)
247	case OpARM64MOVHloadidx:
248		return rewriteValueARM64_OpARM64MOVHloadidx(v)
249	case OpARM64MOVHloadidx2:
250		return rewriteValueARM64_OpARM64MOVHloadidx2(v)
251	case OpARM64MOVHreg:
252		return rewriteValueARM64_OpARM64MOVHreg(v)
253	case OpARM64MOVHstore:
254		return rewriteValueARM64_OpARM64MOVHstore(v)
255	case OpARM64MOVHstoreidx:
256		return rewriteValueARM64_OpARM64MOVHstoreidx(v)
257	case OpARM64MOVHstoreidx2:
258		return rewriteValueARM64_OpARM64MOVHstoreidx2(v)
259	case OpARM64MOVHstorezero:
260		return rewriteValueARM64_OpARM64MOVHstorezero(v)
261	case OpARM64MOVHstorezeroidx:
262		return rewriteValueARM64_OpARM64MOVHstorezeroidx(v)
263	case OpARM64MOVHstorezeroidx2:
264		return rewriteValueARM64_OpARM64MOVHstorezeroidx2(v)
265	case OpARM64MOVQstorezero:
266		return rewriteValueARM64_OpARM64MOVQstorezero(v)
267	case OpARM64MOVWUload:
268		return rewriteValueARM64_OpARM64MOVWUload(v)
269	case OpARM64MOVWUloadidx:
270		return rewriteValueARM64_OpARM64MOVWUloadidx(v)
271	case OpARM64MOVWUloadidx4:
272		return rewriteValueARM64_OpARM64MOVWUloadidx4(v)
273	case OpARM64MOVWUreg:
274		return rewriteValueARM64_OpARM64MOVWUreg(v)
275	case OpARM64MOVWload:
276		return rewriteValueARM64_OpARM64MOVWload(v)
277	case OpARM64MOVWloadidx:
278		return rewriteValueARM64_OpARM64MOVWloadidx(v)
279	case OpARM64MOVWloadidx4:
280		return rewriteValueARM64_OpARM64MOVWloadidx4(v)
281	case OpARM64MOVWreg:
282		return rewriteValueARM64_OpARM64MOVWreg(v)
283	case OpARM64MOVWstore:
284		return rewriteValueARM64_OpARM64MOVWstore(v)
285	case OpARM64MOVWstoreidx:
286		return rewriteValueARM64_OpARM64MOVWstoreidx(v)
287	case OpARM64MOVWstoreidx4:
288		return rewriteValueARM64_OpARM64MOVWstoreidx4(v)
289	case OpARM64MOVWstorezero:
290		return rewriteValueARM64_OpARM64MOVWstorezero(v)
291	case OpARM64MOVWstorezeroidx:
292		return rewriteValueARM64_OpARM64MOVWstorezeroidx(v)
293	case OpARM64MOVWstorezeroidx4:
294		return rewriteValueARM64_OpARM64MOVWstorezeroidx4(v)
295	case OpARM64MSUB:
296		return rewriteValueARM64_OpARM64MSUB(v)
297	case OpARM64MSUBW:
298		return rewriteValueARM64_OpARM64MSUBW(v)
299	case OpARM64MUL:
300		return rewriteValueARM64_OpARM64MUL(v)
301	case OpARM64MULW:
302		return rewriteValueARM64_OpARM64MULW(v)
303	case OpARM64MVN:
304		return rewriteValueARM64_OpARM64MVN(v)
305	case OpARM64MVNshiftLL:
306		return rewriteValueARM64_OpARM64MVNshiftLL(v)
307	case OpARM64MVNshiftRA:
308		return rewriteValueARM64_OpARM64MVNshiftRA(v)
309	case OpARM64MVNshiftRL:
310		return rewriteValueARM64_OpARM64MVNshiftRL(v)
311	case OpARM64MVNshiftRO:
312		return rewriteValueARM64_OpARM64MVNshiftRO(v)
313	case OpARM64NEG:
314		return rewriteValueARM64_OpARM64NEG(v)
315	case OpARM64NEGshiftLL:
316		return rewriteValueARM64_OpARM64NEGshiftLL(v)
317	case OpARM64NEGshiftRA:
318		return rewriteValueARM64_OpARM64NEGshiftRA(v)
319	case OpARM64NEGshiftRL:
320		return rewriteValueARM64_OpARM64NEGshiftRL(v)
321	case OpARM64NotEqual:
322		return rewriteValueARM64_OpARM64NotEqual(v)
323	case OpARM64OR:
324		return rewriteValueARM64_OpARM64OR(v)
325	case OpARM64ORN:
326		return rewriteValueARM64_OpARM64ORN(v)
327	case OpARM64ORNshiftLL:
328		return rewriteValueARM64_OpARM64ORNshiftLL(v)
329	case OpARM64ORNshiftRA:
330		return rewriteValueARM64_OpARM64ORNshiftRA(v)
331	case OpARM64ORNshiftRL:
332		return rewriteValueARM64_OpARM64ORNshiftRL(v)
333	case OpARM64ORNshiftRO:
334		return rewriteValueARM64_OpARM64ORNshiftRO(v)
335	case OpARM64ORconst:
336		return rewriteValueARM64_OpARM64ORconst(v)
337	case OpARM64ORshiftLL:
338		return rewriteValueARM64_OpARM64ORshiftLL(v)
339	case OpARM64ORshiftRA:
340		return rewriteValueARM64_OpARM64ORshiftRA(v)
341	case OpARM64ORshiftRL:
342		return rewriteValueARM64_OpARM64ORshiftRL(v)
343	case OpARM64ORshiftRO:
344		return rewriteValueARM64_OpARM64ORshiftRO(v)
345	case OpARM64REV:
346		return rewriteValueARM64_OpARM64REV(v)
347	case OpARM64REVW:
348		return rewriteValueARM64_OpARM64REVW(v)
349	case OpARM64ROR:
350		return rewriteValueARM64_OpARM64ROR(v)
351	case OpARM64RORW:
352		return rewriteValueARM64_OpARM64RORW(v)
353	case OpARM64SBCSflags:
354		return rewriteValueARM64_OpARM64SBCSflags(v)
355	case OpARM64SLL:
356		return rewriteValueARM64_OpARM64SLL(v)
357	case OpARM64SLLconst:
358		return rewriteValueARM64_OpARM64SLLconst(v)
359	case OpARM64SRA:
360		return rewriteValueARM64_OpARM64SRA(v)
361	case OpARM64SRAconst:
362		return rewriteValueARM64_OpARM64SRAconst(v)
363	case OpARM64SRL:
364		return rewriteValueARM64_OpARM64SRL(v)
365	case OpARM64SRLconst:
366		return rewriteValueARM64_OpARM64SRLconst(v)
367	case OpARM64STP:
368		return rewriteValueARM64_OpARM64STP(v)
369	case OpARM64SUB:
370		return rewriteValueARM64_OpARM64SUB(v)
371	case OpARM64SUBconst:
372		return rewriteValueARM64_OpARM64SUBconst(v)
373	case OpARM64SUBshiftLL:
374		return rewriteValueARM64_OpARM64SUBshiftLL(v)
375	case OpARM64SUBshiftRA:
376		return rewriteValueARM64_OpARM64SUBshiftRA(v)
377	case OpARM64SUBshiftRL:
378		return rewriteValueARM64_OpARM64SUBshiftRL(v)
379	case OpARM64TST:
380		return rewriteValueARM64_OpARM64TST(v)
381	case OpARM64TSTW:
382		return rewriteValueARM64_OpARM64TSTW(v)
383	case OpARM64TSTWconst:
384		return rewriteValueARM64_OpARM64TSTWconst(v)
385	case OpARM64TSTconst:
386		return rewriteValueARM64_OpARM64TSTconst(v)
387	case OpARM64TSTshiftLL:
388		return rewriteValueARM64_OpARM64TSTshiftLL(v)
389	case OpARM64TSTshiftRA:
390		return rewriteValueARM64_OpARM64TSTshiftRA(v)
391	case OpARM64TSTshiftRL:
392		return rewriteValueARM64_OpARM64TSTshiftRL(v)
393	case OpARM64TSTshiftRO:
394		return rewriteValueARM64_OpARM64TSTshiftRO(v)
395	case OpARM64UBFIZ:
396		return rewriteValueARM64_OpARM64UBFIZ(v)
397	case OpARM64UBFX:
398		return rewriteValueARM64_OpARM64UBFX(v)
399	case OpARM64UDIV:
400		return rewriteValueARM64_OpARM64UDIV(v)
401	case OpARM64UDIVW:
402		return rewriteValueARM64_OpARM64UDIVW(v)
403	case OpARM64UMOD:
404		return rewriteValueARM64_OpARM64UMOD(v)
405	case OpARM64UMODW:
406		return rewriteValueARM64_OpARM64UMODW(v)
407	case OpARM64XOR:
408		return rewriteValueARM64_OpARM64XOR(v)
409	case OpARM64XORconst:
410		return rewriteValueARM64_OpARM64XORconst(v)
411	case OpARM64XORshiftLL:
412		return rewriteValueARM64_OpARM64XORshiftLL(v)
413	case OpARM64XORshiftRA:
414		return rewriteValueARM64_OpARM64XORshiftRA(v)
415	case OpARM64XORshiftRL:
416		return rewriteValueARM64_OpARM64XORshiftRL(v)
417	case OpARM64XORshiftRO:
418		return rewriteValueARM64_OpARM64XORshiftRO(v)
419	case OpAbs:
420		v.Op = OpARM64FABSD
421		return true
422	case OpAdd16:
423		v.Op = OpARM64ADD
424		return true
425	case OpAdd32:
426		v.Op = OpARM64ADD
427		return true
428	case OpAdd32F:
429		v.Op = OpARM64FADDS
430		return true
431	case OpAdd64:
432		v.Op = OpARM64ADD
433		return true
434	case OpAdd64F:
435		v.Op = OpARM64FADDD
436		return true
437	case OpAdd8:
438		v.Op = OpARM64ADD
439		return true
440	case OpAddPtr:
441		v.Op = OpARM64ADD
442		return true
443	case OpAddr:
444		return rewriteValueARM64_OpAddr(v)
445	case OpAnd16:
446		v.Op = OpARM64AND
447		return true
448	case OpAnd32:
449		v.Op = OpARM64AND
450		return true
451	case OpAnd64:
452		v.Op = OpARM64AND
453		return true
454	case OpAnd8:
455		v.Op = OpARM64AND
456		return true
457	case OpAndB:
458		v.Op = OpARM64AND
459		return true
460	case OpAtomicAdd32:
461		v.Op = OpARM64LoweredAtomicAdd32
462		return true
463	case OpAtomicAdd32Variant:
464		v.Op = OpARM64LoweredAtomicAdd32Variant
465		return true
466	case OpAtomicAdd64:
467		v.Op = OpARM64LoweredAtomicAdd64
468		return true
469	case OpAtomicAdd64Variant:
470		v.Op = OpARM64LoweredAtomicAdd64Variant
471		return true
472	case OpAtomicAnd32:
473		v.Op = OpARM64LoweredAtomicAnd32
474		return true
475	case OpAtomicAnd32Variant:
476		v.Op = OpARM64LoweredAtomicAnd32Variant
477		return true
478	case OpAtomicAnd64:
479		v.Op = OpARM64LoweredAtomicAnd64
480		return true
481	case OpAtomicAnd64Variant:
482		v.Op = OpARM64LoweredAtomicAnd64Variant
483		return true
484	case OpAtomicAnd8:
485		v.Op = OpARM64LoweredAtomicAnd8
486		return true
487	case OpAtomicAnd8Variant:
488		v.Op = OpARM64LoweredAtomicAnd8Variant
489		return true
490	case OpAtomicCompareAndSwap32:
491		v.Op = OpARM64LoweredAtomicCas32
492		return true
493	case OpAtomicCompareAndSwap32Variant:
494		v.Op = OpARM64LoweredAtomicCas32Variant
495		return true
496	case OpAtomicCompareAndSwap64:
497		v.Op = OpARM64LoweredAtomicCas64
498		return true
499	case OpAtomicCompareAndSwap64Variant:
500		v.Op = OpARM64LoweredAtomicCas64Variant
501		return true
502	case OpAtomicExchange32:
503		v.Op = OpARM64LoweredAtomicExchange32
504		return true
505	case OpAtomicExchange32Variant:
506		v.Op = OpARM64LoweredAtomicExchange32Variant
507		return true
508	case OpAtomicExchange64:
509		v.Op = OpARM64LoweredAtomicExchange64
510		return true
511	case OpAtomicExchange64Variant:
512		v.Op = OpARM64LoweredAtomicExchange64Variant
513		return true
514	case OpAtomicLoad32:
515		v.Op = OpARM64LDARW
516		return true
517	case OpAtomicLoad64:
518		v.Op = OpARM64LDAR
519		return true
520	case OpAtomicLoad8:
521		v.Op = OpARM64LDARB
522		return true
523	case OpAtomicLoadPtr:
524		v.Op = OpARM64LDAR
525		return true
526	case OpAtomicOr32:
527		v.Op = OpARM64LoweredAtomicOr32
528		return true
529	case OpAtomicOr32Variant:
530		v.Op = OpARM64LoweredAtomicOr32Variant
531		return true
532	case OpAtomicOr64:
533		v.Op = OpARM64LoweredAtomicOr64
534		return true
535	case OpAtomicOr64Variant:
536		v.Op = OpARM64LoweredAtomicOr64Variant
537		return true
538	case OpAtomicOr8:
539		v.Op = OpARM64LoweredAtomicOr8
540		return true
541	case OpAtomicOr8Variant:
542		v.Op = OpARM64LoweredAtomicOr8Variant
543		return true
544	case OpAtomicStore32:
545		v.Op = OpARM64STLRW
546		return true
547	case OpAtomicStore64:
548		v.Op = OpARM64STLR
549		return true
550	case OpAtomicStore8:
551		v.Op = OpARM64STLRB
552		return true
553	case OpAtomicStorePtrNoWB:
554		v.Op = OpARM64STLR
555		return true
556	case OpAvg64u:
557		return rewriteValueARM64_OpAvg64u(v)
558	case OpBitLen32:
559		return rewriteValueARM64_OpBitLen32(v)
560	case OpBitLen64:
561		return rewriteValueARM64_OpBitLen64(v)
562	case OpBitRev16:
563		return rewriteValueARM64_OpBitRev16(v)
564	case OpBitRev32:
565		v.Op = OpARM64RBITW
566		return true
567	case OpBitRev64:
568		v.Op = OpARM64RBIT
569		return true
570	case OpBitRev8:
571		return rewriteValueARM64_OpBitRev8(v)
572	case OpBswap16:
573		v.Op = OpARM64REV16W
574		return true
575	case OpBswap32:
576		v.Op = OpARM64REVW
577		return true
578	case OpBswap64:
579		v.Op = OpARM64REV
580		return true
581	case OpCeil:
582		v.Op = OpARM64FRINTPD
583		return true
584	case OpClosureCall:
585		v.Op = OpARM64CALLclosure
586		return true
587	case OpCom16:
588		v.Op = OpARM64MVN
589		return true
590	case OpCom32:
591		v.Op = OpARM64MVN
592		return true
593	case OpCom64:
594		v.Op = OpARM64MVN
595		return true
596	case OpCom8:
597		v.Op = OpARM64MVN
598		return true
599	case OpCondSelect:
600		return rewriteValueARM64_OpCondSelect(v)
601	case OpConst16:
602		return rewriteValueARM64_OpConst16(v)
603	case OpConst32:
604		return rewriteValueARM64_OpConst32(v)
605	case OpConst32F:
606		return rewriteValueARM64_OpConst32F(v)
607	case OpConst64:
608		return rewriteValueARM64_OpConst64(v)
609	case OpConst64F:
610		return rewriteValueARM64_OpConst64F(v)
611	case OpConst8:
612		return rewriteValueARM64_OpConst8(v)
613	case OpConstBool:
614		return rewriteValueARM64_OpConstBool(v)
615	case OpConstNil:
616		return rewriteValueARM64_OpConstNil(v)
617	case OpCtz16:
618		return rewriteValueARM64_OpCtz16(v)
619	case OpCtz16NonZero:
620		v.Op = OpCtz32
621		return true
622	case OpCtz32:
623		return rewriteValueARM64_OpCtz32(v)
624	case OpCtz32NonZero:
625		v.Op = OpCtz32
626		return true
627	case OpCtz64:
628		return rewriteValueARM64_OpCtz64(v)
629	case OpCtz64NonZero:
630		v.Op = OpCtz64
631		return true
632	case OpCtz8:
633		return rewriteValueARM64_OpCtz8(v)
634	case OpCtz8NonZero:
635		v.Op = OpCtz32
636		return true
637	case OpCvt32Fto32:
638		v.Op = OpARM64FCVTZSSW
639		return true
640	case OpCvt32Fto32U:
641		v.Op = OpARM64FCVTZUSW
642		return true
643	case OpCvt32Fto64:
644		v.Op = OpARM64FCVTZSS
645		return true
646	case OpCvt32Fto64F:
647		v.Op = OpARM64FCVTSD
648		return true
649	case OpCvt32Fto64U:
650		v.Op = OpARM64FCVTZUS
651		return true
652	case OpCvt32Uto32F:
653		v.Op = OpARM64UCVTFWS
654		return true
655	case OpCvt32Uto64F:
656		v.Op = OpARM64UCVTFWD
657		return true
658	case OpCvt32to32F:
659		v.Op = OpARM64SCVTFWS
660		return true
661	case OpCvt32to64F:
662		v.Op = OpARM64SCVTFWD
663		return true
664	case OpCvt64Fto32:
665		v.Op = OpARM64FCVTZSDW
666		return true
667	case OpCvt64Fto32F:
668		v.Op = OpARM64FCVTDS
669		return true
670	case OpCvt64Fto32U:
671		v.Op = OpARM64FCVTZUDW
672		return true
673	case OpCvt64Fto64:
674		v.Op = OpARM64FCVTZSD
675		return true
676	case OpCvt64Fto64U:
677		v.Op = OpARM64FCVTZUD
678		return true
679	case OpCvt64Uto32F:
680		v.Op = OpARM64UCVTFS
681		return true
682	case OpCvt64Uto64F:
683		v.Op = OpARM64UCVTFD
684		return true
685	case OpCvt64to32F:
686		v.Op = OpARM64SCVTFS
687		return true
688	case OpCvt64to64F:
689		v.Op = OpARM64SCVTFD
690		return true
691	case OpCvtBoolToUint8:
692		v.Op = OpCopy
693		return true
694	case OpDiv16:
695		return rewriteValueARM64_OpDiv16(v)
696	case OpDiv16u:
697		return rewriteValueARM64_OpDiv16u(v)
698	case OpDiv32:
699		return rewriteValueARM64_OpDiv32(v)
700	case OpDiv32F:
701		v.Op = OpARM64FDIVS
702		return true
703	case OpDiv32u:
704		v.Op = OpARM64UDIVW
705		return true
706	case OpDiv64:
707		return rewriteValueARM64_OpDiv64(v)
708	case OpDiv64F:
709		v.Op = OpARM64FDIVD
710		return true
711	case OpDiv64u:
712		v.Op = OpARM64UDIV
713		return true
714	case OpDiv8:
715		return rewriteValueARM64_OpDiv8(v)
716	case OpDiv8u:
717		return rewriteValueARM64_OpDiv8u(v)
718	case OpEq16:
719		return rewriteValueARM64_OpEq16(v)
720	case OpEq32:
721		return rewriteValueARM64_OpEq32(v)
722	case OpEq32F:
723		return rewriteValueARM64_OpEq32F(v)
724	case OpEq64:
725		return rewriteValueARM64_OpEq64(v)
726	case OpEq64F:
727		return rewriteValueARM64_OpEq64F(v)
728	case OpEq8:
729		return rewriteValueARM64_OpEq8(v)
730	case OpEqB:
731		return rewriteValueARM64_OpEqB(v)
732	case OpEqPtr:
733		return rewriteValueARM64_OpEqPtr(v)
734	case OpFMA:
735		return rewriteValueARM64_OpFMA(v)
736	case OpFloor:
737		v.Op = OpARM64FRINTMD
738		return true
739	case OpGetCallerPC:
740		v.Op = OpARM64LoweredGetCallerPC
741		return true
742	case OpGetCallerSP:
743		v.Op = OpARM64LoweredGetCallerSP
744		return true
745	case OpGetClosurePtr:
746		v.Op = OpARM64LoweredGetClosurePtr
747		return true
748	case OpHmul32:
749		return rewriteValueARM64_OpHmul32(v)
750	case OpHmul32u:
751		return rewriteValueARM64_OpHmul32u(v)
752	case OpHmul64:
753		v.Op = OpARM64MULH
754		return true
755	case OpHmul64u:
756		v.Op = OpARM64UMULH
757		return true
758	case OpInterCall:
759		v.Op = OpARM64CALLinter
760		return true
761	case OpIsInBounds:
762		return rewriteValueARM64_OpIsInBounds(v)
763	case OpIsNonNil:
764		return rewriteValueARM64_OpIsNonNil(v)
765	case OpIsSliceInBounds:
766		return rewriteValueARM64_OpIsSliceInBounds(v)
767	case OpLeq16:
768		return rewriteValueARM64_OpLeq16(v)
769	case OpLeq16U:
770		return rewriteValueARM64_OpLeq16U(v)
771	case OpLeq32:
772		return rewriteValueARM64_OpLeq32(v)
773	case OpLeq32F:
774		return rewriteValueARM64_OpLeq32F(v)
775	case OpLeq32U:
776		return rewriteValueARM64_OpLeq32U(v)
777	case OpLeq64:
778		return rewriteValueARM64_OpLeq64(v)
779	case OpLeq64F:
780		return rewriteValueARM64_OpLeq64F(v)
781	case OpLeq64U:
782		return rewriteValueARM64_OpLeq64U(v)
783	case OpLeq8:
784		return rewriteValueARM64_OpLeq8(v)
785	case OpLeq8U:
786		return rewriteValueARM64_OpLeq8U(v)
787	case OpLess16:
788		return rewriteValueARM64_OpLess16(v)
789	case OpLess16U:
790		return rewriteValueARM64_OpLess16U(v)
791	case OpLess32:
792		return rewriteValueARM64_OpLess32(v)
793	case OpLess32F:
794		return rewriteValueARM64_OpLess32F(v)
795	case OpLess32U:
796		return rewriteValueARM64_OpLess32U(v)
797	case OpLess64:
798		return rewriteValueARM64_OpLess64(v)
799	case OpLess64F:
800		return rewriteValueARM64_OpLess64F(v)
801	case OpLess64U:
802		return rewriteValueARM64_OpLess64U(v)
803	case OpLess8:
804		return rewriteValueARM64_OpLess8(v)
805	case OpLess8U:
806		return rewriteValueARM64_OpLess8U(v)
807	case OpLoad:
808		return rewriteValueARM64_OpLoad(v)
809	case OpLocalAddr:
810		return rewriteValueARM64_OpLocalAddr(v)
811	case OpLsh16x16:
812		return rewriteValueARM64_OpLsh16x16(v)
813	case OpLsh16x32:
814		return rewriteValueARM64_OpLsh16x32(v)
815	case OpLsh16x64:
816		return rewriteValueARM64_OpLsh16x64(v)
817	case OpLsh16x8:
818		return rewriteValueARM64_OpLsh16x8(v)
819	case OpLsh32x16:
820		return rewriteValueARM64_OpLsh32x16(v)
821	case OpLsh32x32:
822		return rewriteValueARM64_OpLsh32x32(v)
823	case OpLsh32x64:
824		return rewriteValueARM64_OpLsh32x64(v)
825	case OpLsh32x8:
826		return rewriteValueARM64_OpLsh32x8(v)
827	case OpLsh64x16:
828		return rewriteValueARM64_OpLsh64x16(v)
829	case OpLsh64x32:
830		return rewriteValueARM64_OpLsh64x32(v)
831	case OpLsh64x64:
832		return rewriteValueARM64_OpLsh64x64(v)
833	case OpLsh64x8:
834		return rewriteValueARM64_OpLsh64x8(v)
835	case OpLsh8x16:
836		return rewriteValueARM64_OpLsh8x16(v)
837	case OpLsh8x32:
838		return rewriteValueARM64_OpLsh8x32(v)
839	case OpLsh8x64:
840		return rewriteValueARM64_OpLsh8x64(v)
841	case OpLsh8x8:
842		return rewriteValueARM64_OpLsh8x8(v)
843	case OpMax32F:
844		v.Op = OpARM64FMAXS
845		return true
846	case OpMax64F:
847		v.Op = OpARM64FMAXD
848		return true
849	case OpMin32F:
850		v.Op = OpARM64FMINS
851		return true
852	case OpMin64F:
853		v.Op = OpARM64FMIND
854		return true
855	case OpMod16:
856		return rewriteValueARM64_OpMod16(v)
857	case OpMod16u:
858		return rewriteValueARM64_OpMod16u(v)
859	case OpMod32:
860		return rewriteValueARM64_OpMod32(v)
861	case OpMod32u:
862		v.Op = OpARM64UMODW
863		return true
864	case OpMod64:
865		return rewriteValueARM64_OpMod64(v)
866	case OpMod64u:
867		v.Op = OpARM64UMOD
868		return true
869	case OpMod8:
870		return rewriteValueARM64_OpMod8(v)
871	case OpMod8u:
872		return rewriteValueARM64_OpMod8u(v)
873	case OpMove:
874		return rewriteValueARM64_OpMove(v)
875	case OpMul16:
876		v.Op = OpARM64MULW
877		return true
878	case OpMul32:
879		v.Op = OpARM64MULW
880		return true
881	case OpMul32F:
882		v.Op = OpARM64FMULS
883		return true
884	case OpMul64:
885		v.Op = OpARM64MUL
886		return true
887	case OpMul64F:
888		v.Op = OpARM64FMULD
889		return true
890	case OpMul8:
891		v.Op = OpARM64MULW
892		return true
893	case OpNeg16:
894		v.Op = OpARM64NEG
895		return true
896	case OpNeg32:
897		v.Op = OpARM64NEG
898		return true
899	case OpNeg32F:
900		v.Op = OpARM64FNEGS
901		return true
902	case OpNeg64:
903		v.Op = OpARM64NEG
904		return true
905	case OpNeg64F:
906		v.Op = OpARM64FNEGD
907		return true
908	case OpNeg8:
909		v.Op = OpARM64NEG
910		return true
911	case OpNeq16:
912		return rewriteValueARM64_OpNeq16(v)
913	case OpNeq32:
914		return rewriteValueARM64_OpNeq32(v)
915	case OpNeq32F:
916		return rewriteValueARM64_OpNeq32F(v)
917	case OpNeq64:
918		return rewriteValueARM64_OpNeq64(v)
919	case OpNeq64F:
920		return rewriteValueARM64_OpNeq64F(v)
921	case OpNeq8:
922		return rewriteValueARM64_OpNeq8(v)
923	case OpNeqB:
924		v.Op = OpARM64XOR
925		return true
926	case OpNeqPtr:
927		return rewriteValueARM64_OpNeqPtr(v)
928	case OpNilCheck:
929		v.Op = OpARM64LoweredNilCheck
930		return true
931	case OpNot:
932		return rewriteValueARM64_OpNot(v)
933	case OpOffPtr:
934		return rewriteValueARM64_OpOffPtr(v)
935	case OpOr16:
936		v.Op = OpARM64OR
937		return true
938	case OpOr32:
939		v.Op = OpARM64OR
940		return true
941	case OpOr64:
942		v.Op = OpARM64OR
943		return true
944	case OpOr8:
945		v.Op = OpARM64OR
946		return true
947	case OpOrB:
948		v.Op = OpARM64OR
949		return true
950	case OpPanicBounds:
951		return rewriteValueARM64_OpPanicBounds(v)
952	case OpPopCount16:
953		return rewriteValueARM64_OpPopCount16(v)
954	case OpPopCount32:
955		return rewriteValueARM64_OpPopCount32(v)
956	case OpPopCount64:
957		return rewriteValueARM64_OpPopCount64(v)
958	case OpPrefetchCache:
959		return rewriteValueARM64_OpPrefetchCache(v)
960	case OpPrefetchCacheStreamed:
961		return rewriteValueARM64_OpPrefetchCacheStreamed(v)
962	case OpPubBarrier:
963		return rewriteValueARM64_OpPubBarrier(v)
964	case OpRotateLeft16:
965		return rewriteValueARM64_OpRotateLeft16(v)
966	case OpRotateLeft32:
967		return rewriteValueARM64_OpRotateLeft32(v)
968	case OpRotateLeft64:
969		return rewriteValueARM64_OpRotateLeft64(v)
970	case OpRotateLeft8:
971		return rewriteValueARM64_OpRotateLeft8(v)
972	case OpRound:
973		v.Op = OpARM64FRINTAD
974		return true
975	case OpRound32F:
976		v.Op = OpARM64LoweredRound32F
977		return true
978	case OpRound64F:
979		v.Op = OpARM64LoweredRound64F
980		return true
981	case OpRoundToEven:
982		v.Op = OpARM64FRINTND
983		return true
984	case OpRsh16Ux16:
985		return rewriteValueARM64_OpRsh16Ux16(v)
986	case OpRsh16Ux32:
987		return rewriteValueARM64_OpRsh16Ux32(v)
988	case OpRsh16Ux64:
989		return rewriteValueARM64_OpRsh16Ux64(v)
990	case OpRsh16Ux8:
991		return rewriteValueARM64_OpRsh16Ux8(v)
992	case OpRsh16x16:
993		return rewriteValueARM64_OpRsh16x16(v)
994	case OpRsh16x32:
995		return rewriteValueARM64_OpRsh16x32(v)
996	case OpRsh16x64:
997		return rewriteValueARM64_OpRsh16x64(v)
998	case OpRsh16x8:
999		return rewriteValueARM64_OpRsh16x8(v)
1000	case OpRsh32Ux16:
1001		return rewriteValueARM64_OpRsh32Ux16(v)
1002	case OpRsh32Ux32:
1003		return rewriteValueARM64_OpRsh32Ux32(v)
1004	case OpRsh32Ux64:
1005		return rewriteValueARM64_OpRsh32Ux64(v)
1006	case OpRsh32Ux8:
1007		return rewriteValueARM64_OpRsh32Ux8(v)
1008	case OpRsh32x16:
1009		return rewriteValueARM64_OpRsh32x16(v)
1010	case OpRsh32x32:
1011		return rewriteValueARM64_OpRsh32x32(v)
1012	case OpRsh32x64:
1013		return rewriteValueARM64_OpRsh32x64(v)
1014	case OpRsh32x8:
1015		return rewriteValueARM64_OpRsh32x8(v)
1016	case OpRsh64Ux16:
1017		return rewriteValueARM64_OpRsh64Ux16(v)
1018	case OpRsh64Ux32:
1019		return rewriteValueARM64_OpRsh64Ux32(v)
1020	case OpRsh64Ux64:
1021		return rewriteValueARM64_OpRsh64Ux64(v)
1022	case OpRsh64Ux8:
1023		return rewriteValueARM64_OpRsh64Ux8(v)
1024	case OpRsh64x16:
1025		return rewriteValueARM64_OpRsh64x16(v)
1026	case OpRsh64x32:
1027		return rewriteValueARM64_OpRsh64x32(v)
1028	case OpRsh64x64:
1029		return rewriteValueARM64_OpRsh64x64(v)
1030	case OpRsh64x8:
1031		return rewriteValueARM64_OpRsh64x8(v)
1032	case OpRsh8Ux16:
1033		return rewriteValueARM64_OpRsh8Ux16(v)
1034	case OpRsh8Ux32:
1035		return rewriteValueARM64_OpRsh8Ux32(v)
1036	case OpRsh8Ux64:
1037		return rewriteValueARM64_OpRsh8Ux64(v)
1038	case OpRsh8Ux8:
1039		return rewriteValueARM64_OpRsh8Ux8(v)
1040	case OpRsh8x16:
1041		return rewriteValueARM64_OpRsh8x16(v)
1042	case OpRsh8x32:
1043		return rewriteValueARM64_OpRsh8x32(v)
1044	case OpRsh8x64:
1045		return rewriteValueARM64_OpRsh8x64(v)
1046	case OpRsh8x8:
1047		return rewriteValueARM64_OpRsh8x8(v)
1048	case OpSelect0:
1049		return rewriteValueARM64_OpSelect0(v)
1050	case OpSelect1:
1051		return rewriteValueARM64_OpSelect1(v)
1052	case OpSelectN:
1053		return rewriteValueARM64_OpSelectN(v)
1054	case OpSignExt16to32:
1055		v.Op = OpARM64MOVHreg
1056		return true
1057	case OpSignExt16to64:
1058		v.Op = OpARM64MOVHreg
1059		return true
1060	case OpSignExt32to64:
1061		v.Op = OpARM64MOVWreg
1062		return true
1063	case OpSignExt8to16:
1064		v.Op = OpARM64MOVBreg
1065		return true
1066	case OpSignExt8to32:
1067		v.Op = OpARM64MOVBreg
1068		return true
1069	case OpSignExt8to64:
1070		v.Op = OpARM64MOVBreg
1071		return true
1072	case OpSlicemask:
1073		return rewriteValueARM64_OpSlicemask(v)
1074	case OpSqrt:
1075		v.Op = OpARM64FSQRTD
1076		return true
1077	case OpSqrt32:
1078		v.Op = OpARM64FSQRTS
1079		return true
1080	case OpStaticCall:
1081		v.Op = OpARM64CALLstatic
1082		return true
1083	case OpStore:
1084		return rewriteValueARM64_OpStore(v)
1085	case OpSub16:
1086		v.Op = OpARM64SUB
1087		return true
1088	case OpSub32:
1089		v.Op = OpARM64SUB
1090		return true
1091	case OpSub32F:
1092		v.Op = OpARM64FSUBS
1093		return true
1094	case OpSub64:
1095		v.Op = OpARM64SUB
1096		return true
1097	case OpSub64F:
1098		v.Op = OpARM64FSUBD
1099		return true
1100	case OpSub8:
1101		v.Op = OpARM64SUB
1102		return true
1103	case OpSubPtr:
1104		v.Op = OpARM64SUB
1105		return true
1106	case OpTailCall:
1107		v.Op = OpARM64CALLtail
1108		return true
1109	case OpTrunc:
1110		v.Op = OpARM64FRINTZD
1111		return true
1112	case OpTrunc16to8:
1113		v.Op = OpCopy
1114		return true
1115	case OpTrunc32to16:
1116		v.Op = OpCopy
1117		return true
1118	case OpTrunc32to8:
1119		v.Op = OpCopy
1120		return true
1121	case OpTrunc64to16:
1122		v.Op = OpCopy
1123		return true
1124	case OpTrunc64to32:
1125		v.Op = OpCopy
1126		return true
1127	case OpTrunc64to8:
1128		v.Op = OpCopy
1129		return true
1130	case OpWB:
1131		v.Op = OpARM64LoweredWB
1132		return true
1133	case OpXor16:
1134		v.Op = OpARM64XOR
1135		return true
1136	case OpXor32:
1137		v.Op = OpARM64XOR
1138		return true
1139	case OpXor64:
1140		v.Op = OpARM64XOR
1141		return true
1142	case OpXor8:
1143		v.Op = OpARM64XOR
1144		return true
1145	case OpZero:
1146		return rewriteValueARM64_OpZero(v)
1147	case OpZeroExt16to32:
1148		v.Op = OpARM64MOVHUreg
1149		return true
1150	case OpZeroExt16to64:
1151		v.Op = OpARM64MOVHUreg
1152		return true
1153	case OpZeroExt32to64:
1154		v.Op = OpARM64MOVWUreg
1155		return true
1156	case OpZeroExt8to16:
1157		v.Op = OpARM64MOVBUreg
1158		return true
1159	case OpZeroExt8to32:
1160		v.Op = OpARM64MOVBUreg
1161		return true
1162	case OpZeroExt8to64:
1163		v.Op = OpARM64MOVBUreg
1164		return true
1165	}
1166	return false
1167}
// rewriteValueARM64_OpARM64ADCSflags applies the generated rewrite rules for
// OpARM64ADCSflags. Rules are tried in source order and the first match wins;
// it reports whether v was rewritten.
func rewriteValueARM64_OpARM64ADCSflags(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] (ADCzerocarry <typ.UInt64> c))))
	// result: (ADCSflags x y c)
	for {
		x := v_0
		y := v_1
		if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
			break
		}
		v_2_0 := v_2.Args[0]
		if v_2_0.Op != OpARM64ADDSconstflags || auxIntToInt64(v_2_0.AuxInt) != -1 {
			break
		}
		v_2_0_0 := v_2_0.Args[0]
		if v_2_0_0.Op != OpARM64ADCzerocarry || v_2_0_0.Type != typ.UInt64 {
			break
		}
		c := v_2_0_0.Args[0]
		v.reset(OpARM64ADCSflags)
		v.AddArg3(x, y, c)
		return true
	}
	// match: (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] (MOVDconst [0]))))
	// result: (ADDSflags x y)
	for {
		x := v_0
		y := v_1
		if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
			break
		}
		v_2_0 := v_2.Args[0]
		if v_2_0.Op != OpARM64ADDSconstflags || auxIntToInt64(v_2_0.AuxInt) != -1 {
			break
		}
		v_2_0_0 := v_2_0.Args[0]
		if v_2_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_2_0_0.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64ADDSflags)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADD applies the generated rewrite rules for
// OpARM64ADD: fold constants into ADDconst, fuse MUL/MNEG(/W) into
// multiply-add/sub, turn ADD of a negation into SUB, and merge shifted
// operands into ADDshift forms. The inner _i0 loops try both operand
// orders since ADD is commutative. Reports whether v was rewritten.
func rewriteValueARM64_OpARM64ADD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADD x (MOVDconst <t> [c]))
	// cond: !t.IsPtr()
	// result: (ADDconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			t := v_1.Type
			c := auxIntToInt64(v_1.AuxInt)
			if !(!t.IsPtr()) {
				continue
			}
			v.reset(OpARM64ADDconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADD a l:(MUL x y))
	// cond: l.Uses==1 && clobber(l)
	// result: (MADD a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			l := v_1
			if l.Op != OpARM64MUL {
				continue
			}
			y := l.Args[1]
			x := l.Args[0]
			if !(l.Uses == 1 && clobber(l)) {
				continue
			}
			v.reset(OpARM64MADD)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	// match: (ADD a l:(MNEG x y))
	// cond: l.Uses==1 && clobber(l)
	// result: (MSUB a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			l := v_1
			if l.Op != OpARM64MNEG {
				continue
			}
			y := l.Args[1]
			x := l.Args[0]
			if !(l.Uses == 1 && clobber(l)) {
				continue
			}
			v.reset(OpARM64MSUB)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	// match: (ADD a l:(MULW x y))
	// cond: v.Type.Size() <= 4 && l.Uses==1 && clobber(l)
	// result: (MADDW a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			l := v_1
			if l.Op != OpARM64MULW {
				continue
			}
			y := l.Args[1]
			x := l.Args[0]
			if !(v.Type.Size() <= 4 && l.Uses == 1 && clobber(l)) {
				continue
			}
			v.reset(OpARM64MADDW)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	// match: (ADD a l:(MNEGW x y))
	// cond: v.Type.Size() <= 4 && l.Uses==1 && clobber(l)
	// result: (MSUBW a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			l := v_1
			if l.Op != OpARM64MNEGW {
				continue
			}
			y := l.Args[1]
			x := l.Args[0]
			if !(v.Type.Size() <= 4 && l.Uses == 1 && clobber(l)) {
				continue
			}
			v.reset(OpARM64MSUBW)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	// match: (ADD x (NEG y))
	// result: (SUB x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64NEG {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpARM64SUB)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (ADD x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ADDshiftLL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SLLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ADDshiftLL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (ADD x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ADDshiftRL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ADDshiftRL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (ADD x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ADDshiftRA x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRAconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ADDshiftRA)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64ADDSflags applies the generated rewrite rule for
// OpARM64ADDSflags: fold a constant operand (either side, via the commuting
// loop) into ADDSconstflags. Reports whether v was rewritten.
func rewriteValueARM64_OpARM64ADDSflags(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADDSflags x (MOVDconst [c]))
	// result: (ADDSconstflags [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64ADDSconstflags)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64ADDconst applies the generated rewrite rules for
// OpARM64ADDconst: fold the offset into MOVDaddr, canonicalize negative
// constants to SUBconst, eliminate adds of zero, and combine with constant
// or constant-add/sub operands. Rules are tried in order; the first match
// wins. Reports whether v was rewritten.
func rewriteValueARM64_OpARM64ADDconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr))
	// cond: is32Bit(off1+int64(off2))
	// result: (MOVDaddr [int32(off1)+off2] {sym} ptr)
	for {
		off1 := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + int64(off2))) {
			break
		}
		v.reset(OpARM64MOVDaddr)
		v.AuxInt = int32ToAuxInt(int32(off1) + off2)
		v.Aux = symToAux(sym)
		v.AddArg(ptr)
		return true
	}
	// match: (ADDconst [c] y)
	// cond: c < 0
	// result: (SUBconst [-c] y)
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if !(c < 0) {
			break
		}
		v.reset(OpARM64SUBconst)
		v.AuxInt = int64ToAuxInt(-c)
		v.AddArg(y)
		return true
	}
	// match: (ADDconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ADDconst [c] (MOVDconst [d]))
	// result: (MOVDconst [c+d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c + d)
		return true
	}
	// match: (ADDconst [c] (ADDconst [d] x))
	// result: (ADDconst [c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c + d)
		v.AddArg(x)
		return true
	}
	// match: (ADDconst [c] (SUBconst [d] x))
	// result: (ADDconst [c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SUBconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c - d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftLL applies the generated rewrite rules for
// OpARM64ADDshiftLL (add with left-shifted second operand): constant folding,
// recognition of byte-reverse (REV16/REV16W) idioms, and extraction (EXTR)
// patterns. Rules are tried in order; the first match wins. Reports whether v
// was rewritten.
func rewriteValueARM64_OpARM64ADDshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ADDshiftLL (MOVDconst [c]) x [d])
	// result: (ADDconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftLL x (MOVDconst [c]) [d])
	// result: (ADDconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x)
	// result: (REV16W x)
	for {
		if v.Type != typ.UInt16 || auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 8) {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64REV16W)
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL [8] (UBFX [armBFAuxInt(8, 24)] (ANDconst [c1] x)) (ANDconst [c2] x))
	// cond: uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff
	// result: (REV16W x)
	for {
		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 24) {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64ANDconst {
			break
		}
		c1 := auxIntToInt64(v_0_0.AuxInt)
		x := v_0_0.Args[0]
		if v_1.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_1.AuxInt)
		if x != v_1.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
			break
		}
		v.reset(OpARM64REV16W)
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x))
	// cond: (uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff)
	// result: (REV16 x)
	for {
		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64ANDconst {
			break
		}
		c1 := auxIntToInt64(v_0_0.AuxInt)
		x := v_0_0.Args[0]
		if v_1.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_1.AuxInt)
		if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
			break
		}
		v.reset(OpARM64REV16)
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x))
	// cond: (uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff)
	// result: (REV16 (ANDconst <x.Type> [0xffffffff] x))
	for {
		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64ANDconst {
			break
		}
		c1 := auxIntToInt64(v_0_0.AuxInt)
		x := v_0_0.Args[0]
		if v_1.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_1.AuxInt)
		if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
			break
		}
		v.reset(OpARM64REV16)
		v0 := b.NewValue0(v.Pos, OpARM64ANDconst, x.Type)
		v0.AuxInt = int64ToAuxInt(0xffffffff)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftLL [c] (SRLconst x [64-c]) x2)
	// result: (EXTRconst [64-c] x2 x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
			break
		}
		x := v_0.Args[0]
		x2 := v_1
		v.reset(OpARM64EXTRconst)
		v.AuxInt = int64ToAuxInt(64 - c)
		v.AddArg2(x2, x)
		return true
	}
	// match: (ADDshiftLL <t> [c] (UBFX [bfc] x) x2)
	// cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)
	// result: (EXTRWconst [32-c] x2 x)
	for {
		t := v.Type
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		x2 := v_1
		if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
			break
		}
		v.reset(OpARM64EXTRWconst)
		v.AuxInt = int64ToAuxInt(32 - c)
		v.AddArg2(x2, x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftRA applies the generated rewrite rules for
// OpARM64ADDshiftRA (add with arithmetically right-shifted second operand):
// fold a constant on either side into ADDconst. Reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64ADDshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (ADDshiftRA (MOVDconst [c]) x [d])
	// result: (ADDconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftRA x (MOVDconst [c]) [d])
	// result: (ADDconst x [c>>uint64(d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftRL applies the generated rewrite rules for
// OpARM64ADDshiftRL (add with logically right-shifted second operand): fold a
// constant on either side into ADDconst. Reports whether v was rewritten.
func rewriteValueARM64_OpARM64ADDshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (ADDshiftRL (MOVDconst [c]) x [d])
	// result: (ADDconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftRL x (MOVDconst [c]) [d])
	// result: (ADDconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64AND applies the generated rewrite rules for
// OpARM64AND: fold constants into ANDconst, simplify x&x, turn AND with a
// bitwise-NOT operand into BIC, and merge shifted/rotated operands into
// ANDshift forms. The inner _i0 loops try both operand orders since AND is
// commutative. Reports whether v was rewritten.
func rewriteValueARM64_OpARM64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AND x (MOVDconst [c]))
	// result: (ANDconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64ANDconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (AND x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (AND x (MVN y))
	// result: (BIC x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MVN {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpARM64BIC)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (AND x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ANDshiftLL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SLLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ANDshiftLL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (AND x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ANDshiftRL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ANDshiftRL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (AND x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ANDshiftRA x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRAconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ANDshiftRA)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (AND x0 x1:(RORconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ANDshiftRO x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64RORconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ANDshiftRO)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64ANDconst applies the generated rewrite rules for
// OpARM64ANDconst: simplify masks of 0 and -1, fold constants, narrow the
// mask through zero-extension ops, and convert shift+mask combinations into
// bitfield extract/insert (UBFX/UBFIZ) where the mask qualifies. Rules are
// tried in order; the first match wins. Reports whether v was rewritten.
func rewriteValueARM64_OpARM64ANDconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ANDconst [0] _)
	// result: (MOVDconst [0])
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (ANDconst [-1] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ANDconst [c] (MOVDconst [d]))
	// result: (MOVDconst [c&d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c & d)
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// result: (ANDconst [c&d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c & d)
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVWUreg x))
	// result: (ANDconst [c&(1<<32-1)] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c & (1<<32 - 1))
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVHUreg x))
	// result: (ANDconst [c&(1<<16-1)] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c & (1<<16 - 1))
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVBUreg x))
	// result: (ANDconst [c&(1<<8-1)] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c & (1<<8 - 1))
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [ac] (SLLconst [sc] x))
	// cond: isARM64BFMask(sc, ac, sc)
	// result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(ac, sc))] x)
	for {
		ac := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		sc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, ac, sc)) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, sc)))
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [ac] (SRLconst [sc] x))
	// cond: isARM64BFMask(sc, ac, 0)
	// result: (UBFX [armBFAuxInt(sc, arm64BFWidth(ac, 0))] x)
	for {
		ac := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst {
			break
		}
		sc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, ac, 0)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, 0)))
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (UBFX [bfc] x))
	// cond: isARM64BFMask(0, c, 0)
	// result: (UBFX [armBFAuxInt(bfc.getARM64BFlsb(), min(bfc.getARM64BFwidth(), arm64BFWidth(c, 0)))] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(isARM64BFMask(0, c, 0)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb(), min(bfc.getARM64BFwidth(), arm64BFWidth(c, 0))))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftLL applies the generated rewrite rules for
// OpARM64ANDshiftLL (and with left-shifted second operand): fold constants
// into ANDconst, and collapse x&x when both sides are the same shifted value.
// Reports whether v was rewritten.
func rewriteValueARM64_OpARM64ANDshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (ANDshiftLL (MOVDconst [c]) x [d])
	// result: (ANDconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftLL x (MOVDconst [c]) [d])
	// result: (ANDconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftLL y:(SLLconst x [c]) x [c])
	// result: y
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if y.Op != OpARM64SLLconst || auxIntToInt64(y.AuxInt) != c {
			break
		}
		x := y.Args[0]
		if x != v_1 {
			break
		}
		v.copyOf(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftRA applies the generated rewrite rules for
// OpARM64ANDshiftRA (and with arithmetically right-shifted second operand):
// fold constants into ANDconst, and collapse x&x when both sides are the
// same shifted value. Reports whether v was rewritten.
func rewriteValueARM64_OpARM64ANDshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (ANDshiftRA (MOVDconst [c]) x [d])
	// result: (ANDconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftRA x (MOVDconst [c]) [d])
	// result: (ANDconst x [c>>uint64(d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftRA y:(SRAconst x [c]) x [c])
	// result: y
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if y.Op != OpARM64SRAconst || auxIntToInt64(y.AuxInt) != c {
			break
		}
		x := y.Args[0]
		if x != v_1 {
			break
		}
		v.copyOf(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftRL applies the generated rewrite rules for
// OpARM64ANDshiftRL (and with logically right-shifted second operand): fold
// constants into ANDconst, and collapse x&x when both sides are the same
// shifted value. Reports whether v was rewritten.
func rewriteValueARM64_OpARM64ANDshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (ANDshiftRL (MOVDconst [c]) x [d])
	// result: (ANDconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftRL x (MOVDconst [c]) [d])
	// result: (ANDconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftRL y:(SRLconst x [c]) x [c])
	// result: y
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if y.Op != OpARM64SRLconst || auxIntToInt64(y.AuxInt) != c {
			break
		}
		x := y.Args[0]
		if x != v_1 {
			break
		}
		v.copyOf(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftRO applies the generated rewrite rules for
// OpARM64ANDshiftRO (and with rotated second operand): fold constants into
// ANDconst (rotating the mask via rotateRight64), and collapse x&x when both
// sides are the same rotated value. Reports whether v was rewritten.
func rewriteValueARM64_OpARM64ANDshiftRO(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (ANDshiftRO (MOVDconst [c]) x [d])
	// result: (ANDconst [c] (RORconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftRO x (MOVDconst [c]) [d])
	// result: (ANDconst x [rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftRO y:(RORconst x [c]) x [c])
	// result: y
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if y.Op != OpARM64RORconst || auxIntToInt64(y.AuxInt) != c {
			break
		}
		x := y.Args[0]
		if x != v_1 {
			break
		}
		v.copyOf(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BIC applies the generated rewrite rules for
// OpARM64BIC (bit clear, x &^ y): fold a constant second operand into
// ANDconst with the inverted mask, simplify x&^x to zero, and merge
// shifted/rotated second operands into BICshift forms. BIC is not
// commutative, so no operand-order loop is generated. Reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64BIC(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (BIC x (MOVDconst [c]))
	// result: (ANDconst [^c] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(^c)
		v.AddArg(x)
		return true
	}
	// match: (BIC x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (BIC x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (BICshiftLL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64BICshiftLL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (BIC x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (BICshiftRL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64BICshiftRL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (BIC x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (BICshiftRA x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64BICshiftRA)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (BIC x0 x1:(RORconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (BICshiftRO x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64RORconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64BICshiftRO)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftLL rewrites BICshiftLL (BIC with the
// second operand left-shifted by the aux amount): a constant second operand
// is shifted and complemented into an ANDconst, and clearing x with its own
// equal left shift yields the constant 0. It reports whether v was rewritten.
func rewriteValueARM64_OpARM64BICshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (BICshiftLL x (MOVDconst [c]) [d])
	// result: (ANDconst x [^int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftLL (SLLconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftRA rewrites BICshiftRA (BIC with the
// second operand arithmetically right-shifted by the aux amount): a constant
// second operand folds (signed shift, then complement) into ANDconst, and
// clearing x with its own equal arithmetic shift yields the constant 0.
// It reports whether v was rewritten.
func rewriteValueARM64_OpARM64BICshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (BICshiftRA x (MOVDconst [c]) [d])
	// result: (ANDconst x [^(c>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(^(c >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftRA (SRAconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftRL rewrites BICshiftRL (BIC with the
// second operand logically right-shifted by the aux amount): a constant
// second operand folds (unsigned shift, then complement) into ANDconst, and
// clearing x with its own equal logical shift yields the constant 0.
// It reports whether v was rewritten.
func rewriteValueARM64_OpARM64BICshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (BICshiftRL x (MOVDconst [c]) [d])
	// result: (ANDconst x [^int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftRL (SRLconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftRO rewrites BICshiftRO (BIC with the
// second operand rotated right by the aux amount): a constant second operand
// folds via rotateRight64 and complement into ANDconst, and clearing x with
// its own equal rotation yields the constant 0. It reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64BICshiftRO(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (BICshiftRO x (MOVDconst [c]) [d])
	// result: (ANDconst x [^rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftRO (RORconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMN rewrites CMN (compare negative) values: a
// constant operand folds into CMNconst, and a shift-by-constant operand fuses
// into the matching CMNshift* op under clobberIfDead. Each rule is wrapped in
// a two-iteration inner loop that swaps v_0/v_1 so both operand orders are
// tried (CMN is commutative). It reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64CMN(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CMN x (MOVDconst [c]))
	// result: (CMNconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64CMNconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (CMN x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMNshiftLL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SLLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64CMNshiftLL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (CMN x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMNshiftRL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64CMNshiftRL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (CMN x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMNshiftRA x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRAconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64CMNshiftRA)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64CMNW rewrites CMNW (32-bit compare negative):
// a constant operand, in either position (inner loop swaps the commutative
// operands), folds into CMNWconst with the constant truncated to int32.
// It reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64CMNW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CMNW x (MOVDconst [c]))
	// result: (CMNWconst [int32(c)] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64CMNWconst)
			v.AuxInt = int32ToAuxInt(int32(c))
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64CMNWconst rewrites CMNWconst (32-bit compare
// negative with constant): a negative constant becomes CMPWconst with the
// negated constant (excluding -1<<31, whose negation overflows int32), and a
// fully-constant compare evaluates to a FlagConstant via addFlags32.
// It reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64CMNWconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (CMNWconst [c] y)
	// cond: c < 0 && c != -1<<31
	// result: (CMPWconst [-c] y)
	for {
		c := auxIntToInt32(v.AuxInt)
		y := v_0
		if !(c < 0 && c != -1<<31) {
			break
		}
		v.reset(OpARM64CMPWconst)
		v.AuxInt = int32ToAuxInt(-c)
		v.AddArg(y)
		return true
	}
	// match: (CMNWconst (MOVDconst [x]) [y])
	// result: (FlagConstant [addFlags32(int32(x),y)])
	for {
		y := auxIntToInt32(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(addFlags32(int32(x), y))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMNconst rewrites CMNconst (64-bit compare
// negative with constant): a negative constant becomes CMPconst with the
// negated constant (excluding -1<<63, whose negation overflows int64), and a
// fully-constant compare evaluates to a FlagConstant via addFlags64.
// It reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64CMNconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (CMNconst [c] y)
	// cond: c < 0 && c != -1<<63
	// result: (CMPconst [-c] y)
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if !(c < 0 && c != -1<<63) {
			break
		}
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64ToAuxInt(-c)
		v.AddArg(y)
		return true
	}
	// match: (CMNconst (MOVDconst [x]) [y])
	// result: (FlagConstant [addFlags64(x,y)])
	for {
		y := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(addFlags64(x, y))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMNshiftLL rewrites CMNshiftLL (CMN with the
// second operand left-shifted by the aux amount): a constant first operand
// becomes CMNconst of an explicit SLLconst (CMN is commutative, so no flag
// inversion is needed), and a constant second operand folds the shift into
// the CMNconst constant. It reports whether v was rewritten.
func rewriteValueARM64_OpARM64CMNshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMNshiftLL (MOVDconst [c]) x [d])
	// result: (CMNconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64CMNconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMNshiftLL x (MOVDconst [c]) [d])
	// result: (CMNconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMNconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMNshiftRA rewrites CMNshiftRA (CMN with the
// second operand arithmetically right-shifted by the aux amount): a constant
// first operand becomes CMNconst of an explicit SRAconst, and a constant
// second operand folds the signed shift into the CMNconst constant.
// It reports whether v was rewritten.
func rewriteValueARM64_OpARM64CMNshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMNshiftRA (MOVDconst [c]) x [d])
	// result: (CMNconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64CMNconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMNshiftRA x (MOVDconst [c]) [d])
	// result: (CMNconst x [c>>uint64(d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMNconst)
		v.AuxInt = int64ToAuxInt(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMNshiftRL rewrites CMNshiftRL (CMN with the
// second operand logically right-shifted by the aux amount): a constant
// first operand becomes CMNconst of an explicit SRLconst, and a constant
// second operand folds the unsigned shift into the CMNconst constant.
// It reports whether v was rewritten.
func rewriteValueARM64_OpARM64CMNshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMNshiftRL (MOVDconst [c]) x [d])
	// result: (CMNconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64CMNconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMNshiftRL x (MOVDconst [c]) [d])
	// result: (CMNconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMNconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMP rewrites CMP (64-bit compare) values. Because
// CMP is not commutative, rules that would put the matched pattern in the
// first operand position wrap the result in InvertFlags: a constant operand
// folds into CMPconst (inverted if the constant was first), operands are
// canonicalized via canonLessThan, and a shift-by-constant operand fuses into
// the matching CMPshift* op under clobberIfDead (again inverted when the
// shift was the first operand). It reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64CMP(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMP x (MOVDconst [c]))
	// result: (CMPconst [c] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (CMP (MOVDconst [c]) x)
	// result: (InvertFlags (CMPconst [c] x))
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x y)
	// cond: canonLessThan(x,y)
	// result: (InvertFlags (CMP y x))
	for {
		x := v_0
		y := v_1
		if !(canonLessThan(x, y)) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMPshiftLL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMPshiftLL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (CMP x0:(SLLconst [c] y) x1)
	// cond: clobberIfDead(x0)
	// result: (InvertFlags (CMPshiftLL x1 y [c]))
	for {
		x0 := v_0
		if x0.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x0.AuxInt)
		y := x0.Args[0]
		x1 := v_1
		if !(clobberIfDead(x0)) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg2(x1, y)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMPshiftRL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMPshiftRL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (CMP x0:(SRLconst [c] y) x1)
	// cond: clobberIfDead(x0)
	// result: (InvertFlags (CMPshiftRL x1 y [c]))
	for {
		x0 := v_0
		if x0.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x0.AuxInt)
		y := x0.Args[0]
		x1 := v_1
		if !(clobberIfDead(x0)) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg2(x1, y)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMPshiftRA x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMPshiftRA)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (CMP x0:(SRAconst [c] y) x1)
	// cond: clobberIfDead(x0)
	// result: (InvertFlags (CMPshiftRA x1 y [c]))
	for {
		x0 := v_0
		if x0.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x0.AuxInt)
		y := x0.Args[0]
		x1 := v_1
		if !(clobberIfDead(x0)) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRA, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg2(x1, y)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPW rewrites CMPW (32-bit compare): a constant
// second operand folds into CMPWconst (truncated to int32); a constant first
// operand does the same but wraps the result in InvertFlags since CMPW is not
// commutative; and operands are canonicalized via canonLessThan with an
// InvertFlags wrapper. It reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64CMPW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMPW x (MOVDconst [c]))
	// result: (CMPWconst [int32(c)] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMPWconst)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg(x)
		return true
	}
	// match: (CMPW (MOVDconst [c]) x)
	// result: (InvertFlags (CMPWconst [int32(c)] x))
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMPW x y)
	// cond: canonLessThan(x,y)
	// result: (InvertFlags (CMPW y x))
	for {
		x := v_0
		y := v_1
		if !(canonLessThan(x, y)) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPWconst rewrites CMPWconst (32-bit compare with
// constant): a negative constant becomes CMNWconst with the negated constant
// (excluding -1<<31, whose negation overflows int32); a fully-constant
// compare evaluates to a FlagConstant via subFlags32; and comparing a
// zero-extended byte/halfword against a constant known to exceed its range
// folds to a constant "less than" flag result. It reports whether v was
// rewritten in place.
func rewriteValueARM64_OpARM64CMPWconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (CMPWconst [c] y)
	// cond: c < 0 && c != -1<<31
	// result: (CMNWconst [-c] y)
	for {
		c := auxIntToInt32(v.AuxInt)
		y := v_0
		if !(c < 0 && c != -1<<31) {
			break
		}
		v.reset(OpARM64CMNWconst)
		v.AuxInt = int32ToAuxInt(-c)
		v.AddArg(y)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// result: (FlagConstant [subFlags32(int32(x),y)])
	for {
		y := auxIntToInt32(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags32(int32(x), y))
		return true
	}
	// match: (CMPWconst (MOVBUreg _) [c])
	// cond: 0xff < c
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		c := auxIntToInt32(v.AuxInt)
		if v_0.Op != OpARM64MOVBUreg || !(0xff < c) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	// match: (CMPWconst (MOVHUreg _) [c])
	// cond: 0xffff < c
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		c := auxIntToInt32(v.AuxInt)
		if v_0.Op != OpARM64MOVHUreg || !(0xffff < c) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPconst rewrites CMPconst (64-bit compare with
// constant): a negative constant becomes CMNconst with the negated constant
// (excluding -1<<63, whose negation overflows int64); a fully-constant
// compare evaluates to a FlagConstant via subFlags64; and comparing a value
// whose range is bounded by its producer (zero-extension, ANDconst mask, or
// SRLconst shift) against a constant known to exceed that range folds to a
// constant "less than" flag result. It reports whether v was rewritten.
func rewriteValueARM64_OpARM64CMPconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (CMPconst [c] y)
	// cond: c < 0 && c != -1<<63
	// result: (CMNconst [-c] y)
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if !(c < 0 && c != -1<<63) {
			break
		}
		v.reset(OpARM64CMNconst)
		v.AuxInt = int64ToAuxInt(-c)
		v.AddArg(y)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// result: (FlagConstant [subFlags64(x,y)])
	for {
		y := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(x, y))
		return true
	}
	// match: (CMPconst (MOVBUreg _) [c])
	// cond: 0xff < c
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVBUreg || !(0xff < c) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	// match: (CMPconst (MOVHUreg _) [c])
	// cond: 0xffff < c
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVHUreg || !(0xffff < c) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	// match: (CMPconst (MOVWUreg _) [c])
	// cond: 0xffffffff < c
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVWUreg || !(0xffffffff < c) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	// match: (CMPconst (ANDconst _ [m]) [n])
	// cond: 0 <= m && m < n
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		n := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(0 <= m && m < n) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	// match: (CMPconst (SRLconst _ [c]) [n])
	// cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		n := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftLL rewrites CMPshiftLL (CMP with the
// second operand left-shifted by the aux amount): a constant first operand
// becomes an InvertFlags-wrapped CMPconst of an explicit SLLconst (CMP is not
// commutative), and a constant second operand folds the shift into the
// CMPconst constant. It reports whether v was rewritten.
func rewriteValueARM64_OpARM64CMPshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMPshiftLL (MOVDconst [c]) x [d])
	// result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d])))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v1.AuxInt = int64ToAuxInt(d)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftLL x (MOVDconst [c]) [d])
	// result: (CMPconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftRA rewrites CMPshiftRA (CMP with the
// second operand arithmetically right-shifted by the aux amount): a constant
// first operand becomes an InvertFlags-wrapped CMPconst of an explicit
// SRAconst, and a constant second operand folds the signed shift into the
// CMPconst constant. It reports whether v was rewritten.
func rewriteValueARM64_OpARM64CMPshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMPshiftRA (MOVDconst [c]) x [d])
	// result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d])))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v1 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v1.AuxInt = int64ToAuxInt(d)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftRA x (MOVDconst [c]) [d])
	// result: (CMPconst x [c>>uint64(d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64ToAuxInt(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftRL rewrites CMPshiftRL (CMP with the
// second operand logically right-shifted by the aux amount): a constant
// first operand becomes an InvertFlags-wrapped CMPconst of an explicit
// SRLconst, and a constant second operand folds the unsigned shift into the
// CMPconst constant. It reports whether v was rewritten.
func rewriteValueARM64_OpARM64CMPshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMPshiftRL (MOVDconst [c]) x [d])
	// result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d])))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v1 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v1.AuxInt = int64ToAuxInt(d)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftRL x (MOVDconst [c]) [d])
	// result: (CMPconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSEL rewrites CSEL (conditional select, condition
// code in AuxInt) values: all-ones/zero arms become CSETM; a zero arm becomes
// CSEL0; an incremented/complemented/negated arm becomes CSINC/CSINV/CSNEG
// (negating the condition via arm64Negate when the special arm is first);
// InvertFlags on the flag input is absorbed by inverting the condition; a
// flag input whose outcome ccARM64Eval can decide statically collapses to the
// selected arm; and a (CMPWconst [0] boolval) flag input with an Equal or
// NotEqual condition is replaced by boolval's own underlying flag comparison.
// It reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64CSEL(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CSEL [cc] (MOVDconst [-1]) (MOVDconst [0]) flag)
	// result: (CSETM [cc] flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != -1 || v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		flag := v_2
		v.reset(OpARM64CSETM)
		v.AuxInt = opToAuxInt(cc)
		v.AddArg(flag)
		return true
	}
	// match: (CSEL [cc] (MOVDconst [0]) (MOVDconst [-1]) flag)
	// result: (CSETM [arm64Negate(cc)] flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 || v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
			break
		}
		flag := v_2
		v.reset(OpARM64CSETM)
		v.AuxInt = opToAuxInt(arm64Negate(cc))
		v.AddArg(flag)
		return true
	}
	// match: (CSEL [cc] x (MOVDconst [0]) flag)
	// result: (CSEL0 [cc] x flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		flag := v_2
		v.reset(OpARM64CSEL0)
		v.AuxInt = opToAuxInt(cc)
		v.AddArg2(x, flag)
		return true
	}
	// match: (CSEL [cc] (MOVDconst [0]) y flag)
	// result: (CSEL0 [arm64Negate(cc)] y flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		y := v_1
		flag := v_2
		v.reset(OpARM64CSEL0)
		v.AuxInt = opToAuxInt(arm64Negate(cc))
		v.AddArg2(y, flag)
		return true
	}
	// match: (CSEL [cc] x (ADDconst [1] a) flag)
	// result: (CSINC [cc] x a flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		a := v_1.Args[0]
		flag := v_2
		v.reset(OpARM64CSINC)
		v.AuxInt = opToAuxInt(cc)
		v.AddArg3(x, a, flag)
		return true
	}
	// match: (CSEL [cc] (ADDconst [1] a) x flag)
	// result: (CSINC [arm64Negate(cc)] x a flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		if v_0.Op != OpARM64ADDconst || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		a := v_0.Args[0]
		x := v_1
		flag := v_2
		v.reset(OpARM64CSINC)
		v.AuxInt = opToAuxInt(arm64Negate(cc))
		v.AddArg3(x, a, flag)
		return true
	}
	// match: (CSEL [cc] x (MVN a) flag)
	// result: (CSINV [cc] x a flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MVN {
			break
		}
		a := v_1.Args[0]
		flag := v_2
		v.reset(OpARM64CSINV)
		v.AuxInt = opToAuxInt(cc)
		v.AddArg3(x, a, flag)
		return true
	}
	// match: (CSEL [cc] (MVN a) x flag)
	// result: (CSINV [arm64Negate(cc)] x a flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		if v_0.Op != OpARM64MVN {
			break
		}
		a := v_0.Args[0]
		x := v_1
		flag := v_2
		v.reset(OpARM64CSINV)
		v.AuxInt = opToAuxInt(arm64Negate(cc))
		v.AddArg3(x, a, flag)
		return true
	}
	// match: (CSEL [cc] x (NEG a) flag)
	// result: (CSNEG [cc] x a flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64NEG {
			break
		}
		a := v_1.Args[0]
		flag := v_2
		v.reset(OpARM64CSNEG)
		v.AuxInt = opToAuxInt(cc)
		v.AddArg3(x, a, flag)
		return true
	}
	// match: (CSEL [cc] (NEG a) x flag)
	// result: (CSNEG [arm64Negate(cc)] x a flag)
	for {
		cc := auxIntToOp(v.AuxInt)
		if v_0.Op != OpARM64NEG {
			break
		}
		a := v_0.Args[0]
		x := v_1
		flag := v_2
		v.reset(OpARM64CSNEG)
		v.AuxInt = opToAuxInt(arm64Negate(cc))
		v.AddArg3(x, a, flag)
		return true
	}
	// match: (CSEL [cc] x y (InvertFlags cmp))
	// result: (CSEL [arm64Invert(cc)] x y cmp)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		y := v_1
		if v_2.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_2.Args[0]
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(arm64Invert(cc))
		v.AddArg3(x, y, cmp)
		return true
	}
	// match: (CSEL [cc] x _ flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: x
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		flag := v_2
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (CSEL [cc] _ y flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: y
	for {
		cc := auxIntToOp(v.AuxInt)
		y := v_1
		flag := v_2
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		v.copyOf(y)
		return true
	}
	// match: (CSEL [cc] x y (CMPWconst [0] boolval))
	// cond: cc == OpARM64NotEqual && flagArg(boolval) != nil
	// result: (CSEL [boolval.Op] x y flagArg(boolval))
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		y := v_1
		if v_2.Op != OpARM64CMPWconst || auxIntToInt32(v_2.AuxInt) != 0 {
			break
		}
		boolval := v_2.Args[0]
		if !(cc == OpARM64NotEqual && flagArg(boolval) != nil) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(boolval.Op)
		v.AddArg3(x, y, flagArg(boolval))
		return true
	}
	// match: (CSEL [cc] x y (CMPWconst [0] boolval))
	// cond: cc == OpARM64Equal && flagArg(boolval) != nil
	// result: (CSEL [arm64Negate(boolval.Op)] x y flagArg(boolval))
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		y := v_1
		if v_2.Op != OpARM64CMPWconst || auxIntToInt32(v_2.AuxInt) != 0 {
			break
		}
		boolval := v_2.Args[0]
		if !(cc == OpARM64Equal && flagArg(boolval) != nil) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(arm64Negate(boolval.Op))
		v.AddArg3(x, y, flagArg(boolval))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSEL0 applies the generated rewrite rules for the
// ARM64 CSEL0 op (conditional select against zero). Each loop below attempts
// one rule; the match/cond/result comments mirror the rule in _gen/ARM64.rules.
// It returns true if v was rewritten in place.
func rewriteValueARM64_OpARM64CSEL0(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CSEL0 [cc] x (InvertFlags cmp))
	// result: (CSEL0 [arm64Invert(cc)] x cmp)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_1.Args[0]
		v.reset(OpARM64CSEL0)
		v.AuxInt = opToAuxInt(arm64Invert(cc))
		v.AddArg2(x, cmp)
		return true
	}
	// match: (CSEL0 [cc] x flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: x
	// Flags are statically known true: the select always picks x.
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		flag := v_1
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (CSEL0 [cc] _ flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: (MOVDconst [0])
	// Flags are statically known false: the select always yields zero.
	for {
		cc := auxIntToOp(v.AuxInt)
		flag := v_1
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (CSEL0 [cc] x (CMPWconst [0] boolval))
	// cond: cc == OpARM64NotEqual && flagArg(boolval) != nil
	// result: (CSEL0 [boolval.Op] x flagArg(boolval))
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64CMPWconst || auxIntToInt32(v_1.AuxInt) != 0 {
			break
		}
		boolval := v_1.Args[0]
		if !(cc == OpARM64NotEqual && flagArg(boolval) != nil) {
			break
		}
		v.reset(OpARM64CSEL0)
		v.AuxInt = opToAuxInt(boolval.Op)
		v.AddArg2(x, flagArg(boolval))
		return true
	}
	// match: (CSEL0 [cc] x (CMPWconst [0] boolval))
	// cond: cc == OpARM64Equal && flagArg(boolval) != nil
	// result: (CSEL0 [arm64Negate(boolval.Op)] x flagArg(boolval))
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64CMPWconst || auxIntToInt32(v_1.AuxInt) != 0 {
			break
		}
		boolval := v_1.Args[0]
		if !(cc == OpARM64Equal && flagArg(boolval) != nil) {
			break
		}
		v.reset(OpARM64CSEL0)
		v.AuxInt = opToAuxInt(arm64Negate(boolval.Op))
		v.AddArg2(x, flagArg(boolval))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSETM applies the generated rewrite rules for the
// ARM64 CSETM op (set all bits if condition holds, else zero). It returns
// true if v was rewritten in place.
func rewriteValueARM64_OpARM64CSETM(v *Value) bool {
	v_0 := v.Args[0]
	// match: (CSETM [cc] (InvertFlags cmp))
	// result: (CSETM [arm64Invert(cc)] cmp)
	for {
		cc := auxIntToOp(v.AuxInt)
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_0.Args[0]
		v.reset(OpARM64CSETM)
		v.AuxInt = opToAuxInt(arm64Invert(cc))
		v.AddArg(cmp)
		return true
	}
	// match: (CSETM [cc] flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: (MOVDconst [-1])
	// Condition statically true: all bits set.
	for {
		cc := auxIntToOp(v.AuxInt)
		flag := v_0
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (CSETM [cc] flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: (MOVDconst [0])
	// Condition statically false: result is zero.
	for {
		cc := auxIntToOp(v.AuxInt)
		flag := v_0
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSINC applies the generated rewrite rules for the
// ARM64 CSINC op (select x if condition holds, else y+1). It returns true if
// v was rewritten in place.
func rewriteValueARM64_OpARM64CSINC(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CSINC [cc] x y (InvertFlags cmp))
	// result: (CSINC [arm64Invert(cc)] x y cmp)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		y := v_1
		if v_2.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_2.Args[0]
		v.reset(OpARM64CSINC)
		v.AuxInt = opToAuxInt(arm64Invert(cc))
		v.AddArg3(x, y, cmp)
		return true
	}
	// match: (CSINC [cc] x _ flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: x
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		flag := v_2
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (CSINC [cc] _ y flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: (ADDconst [1] y)
	// Condition statically false: result is y incremented by one.
	for {
		cc := auxIntToOp(v.AuxInt)
		y := v_1
		flag := v_2
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(1)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSINV applies the generated rewrite rules for the
// ARM64 CSINV op (select x if condition holds, else ^y). It returns true if
// v was rewritten in place.
func rewriteValueARM64_OpARM64CSINV(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CSINV [cc] x y (InvertFlags cmp))
	// result: (CSINV [arm64Invert(cc)] x y cmp)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		y := v_1
		if v_2.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_2.Args[0]
		v.reset(OpARM64CSINV)
		v.AuxInt = opToAuxInt(arm64Invert(cc))
		v.AddArg3(x, y, cmp)
		return true
	}
	// match: (CSINV [cc] x _ flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: x
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		flag := v_2
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (CSINV [cc] _ y flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: (Not y)
	// Condition statically false: result is the inversion of y.
	for {
		cc := auxIntToOp(v.AuxInt)
		y := v_1
		flag := v_2
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		v.reset(OpNot)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSNEG applies the generated rewrite rules for the
// ARM64 CSNEG op (select x if condition holds, else -y). It returns true if
// v was rewritten in place.
func rewriteValueARM64_OpARM64CSNEG(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CSNEG [cc] x y (InvertFlags cmp))
	// result: (CSNEG [arm64Invert(cc)] x y cmp)
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		y := v_1
		if v_2.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_2.Args[0]
		v.reset(OpARM64CSNEG)
		v.AuxInt = opToAuxInt(arm64Invert(cc))
		v.AddArg3(x, y, cmp)
		return true
	}
	// match: (CSNEG [cc] x _ flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: x
	for {
		cc := auxIntToOp(v.AuxInt)
		x := v_0
		flag := v_2
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (CSNEG [cc] _ y flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: (NEG y)
	// Condition statically false: result is the negation of y.
	for {
		cc := auxIntToOp(v.AuxInt)
		y := v_1
		flag := v_2
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64DIV folds a signed 64-bit divide of two constants
// into a single MOVDconst, guarded against division by zero. It returns true
// if v was rewritten in place.
func rewriteValueARM64_OpARM64DIV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (DIV (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [c/d])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c / d)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64DIVW folds a signed 32-bit divide of two constants
// into a MOVDconst. The quotient is computed in int32 and zero-extended via
// uint32 into the 64-bit aux value. It returns true if v was rewritten.
func rewriteValueARM64_OpARM64DIVW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (DIVW (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [int64(uint32(int32(c)/int32(d)))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(int32(c) / int32(d))))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EON applies the generated rewrite rules for the
// ARM64 EON op (x XOR ^y): fold a constant operand into XORconst, simplify
// (EON x x) to -1, and absorb a single-use shift of the second operand into
// the EONshift* forms. It returns true if v was rewritten in place.
func rewriteValueARM64_OpARM64EON(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (EON x (MOVDconst [c]))
	// result: (XORconst [^c] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(^c)
		v.AddArg(x)
		return true
	}
	// match: (EON x x)
	// result: (MOVDconst [-1])
	// x XOR ^x is all ones.
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (EON x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (EONshiftLL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64EONshiftLL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (EON x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (EONshiftRL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64EONshiftRL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (EON x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (EONshiftRA x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64EONshiftRA)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (EON x0 x1:(RORconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (EONshiftRO x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64RORconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64EONshiftRO)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EONshiftLL applies the generated rewrite rules for
// EON with a left-shifted second operand: fold a constant operand, and
// simplify the self-cancelling form to -1. It returns true if v was rewritten.
func rewriteValueARM64_OpARM64EONshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (EONshiftLL x (MOVDconst [c]) [d])
	// result: (XORconst x [^int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (EONshiftLL (SLLconst x [c]) x [c])
	// result: (MOVDconst [-1])
	// Both operands are the same shifted value, so the EON yields all ones.
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EONshiftRA applies the generated rewrite rules for
// EON with an arithmetically right-shifted second operand: fold a constant
// operand, and simplify the self-cancelling form to -1. It returns true if v
// was rewritten.
func rewriteValueARM64_OpARM64EONshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (EONshiftRA x (MOVDconst [c]) [d])
	// result: (XORconst x [^(c>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(^(c >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (EONshiftRA (SRAconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EONshiftRL applies the generated rewrite rules for
// EON with a logically right-shifted second operand: fold a constant operand,
// and simplify the self-cancelling form to -1. It returns true if v was
// rewritten.
func rewriteValueARM64_OpARM64EONshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (EONshiftRL x (MOVDconst [c]) [d])
	// result: (XORconst x [^int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (EONshiftRL (SRLconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EONshiftRO applies the generated rewrite rules for
// EON with a rotated second operand: fold a constant operand via
// rotateRight64, and simplify the self-cancelling form to -1. It returns true
// if v was rewritten.
func rewriteValueARM64_OpARM64EONshiftRO(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (EONshiftRO x (MOVDconst [c]) [d])
	// result: (XORconst x [^rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	// match: (EONshiftRO (RORconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64Equal applies the generated rewrite rules for the
// ARM64 Equal pseudo-op. The rules convert compare-with-zero of a single-use
// AND/ADD/MADD/MSUB (and their const/word variants) into the flag-setting
// TST/CMN/CMP forms, fold statically known flag constants to 0/1, and drop
// InvertFlags (equality is unaffected by operand order). It returns true if
// v was rewritten in place.
func rewriteValueARM64_OpARM64Equal(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Equal (CMPconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (Equal (TST x y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPWconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (Equal (TSTWconst [int32(c)] y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPWconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (Equal (TSTW x y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (Equal (TSTconst [c] y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMP x z:(NEG y)))
	// cond: z.Uses == 1
	// result: (Equal (CMN x y))
	// Comparing x against -y for equality is the same as x+y against zero.
	for {
		if v_0.Op != OpARM64CMP {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		z := v_0.Args[1]
		if z.Op != OpARM64NEG {
			break
		}
		y := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPW x z:(NEG y)))
	// cond: z.Uses == 1
	// result: (Equal (CMNW x y))
	for {
		if v_0.Op != OpARM64CMPW {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		z := v_0.Args[1]
		if z.Op != OpARM64NEG {
			break
		}
		y := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPconst [0] x:(ADDconst [c] y)))
	// cond: x.Uses == 1
	// result: (Equal (CMNconst [c] y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ADDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPWconst [0] x:(ADDconst [c] y)))
	// cond: x.Uses == 1
	// result: (Equal (CMNWconst [int32(c)] y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ADDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPconst [0] z:(ADD x y)))
	// cond: z.Uses == 1
	// result: (Equal (CMN x y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64ADD {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPWconst [0] z:(ADD x y)))
	// cond: z.Uses == 1
	// result: (Equal (CMNW x y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64ADD {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPconst [0] z:(MADD a x y)))
	// cond: z.Uses == 1
	// result: (Equal (CMN a (MUL <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MADD {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPconst [0] z:(MSUB a x y)))
	// cond: z.Uses == 1
	// result: (Equal (CMP a (MUL <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MSUB {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPWconst [0] z:(MADDW a x y)))
	// cond: z.Uses == 1
	// result: (Equal (CMNW a (MULW <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MADDW {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (CMPWconst [0] z:(MSUBW a x y)))
	// cond: z.Uses == 1
	// result: (Equal (CMPW a (MULW <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MSUBW {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Equal (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.eq())])
	// Statically known flags: fold the equality test to a 0/1 constant.
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.eq()))
		return true
	}
	// match: (Equal (InvertFlags x))
	// result: (Equal x)
	// Equality is symmetric, so inverted flags can simply be dropped.
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64Equal)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FADDD fuses a double-precision add with a
// single-use multiply into FMADDD/FMSUBD when the function allows FMA
// (useFMA). The inner loop tries both operand orders for the commutative
// add. It returns true if v was rewritten.
func rewriteValueARM64_OpARM64FADDD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDD a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpARM64FMULD {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpARM64FMADDD)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	// match: (FADDD a (FNMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBD a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpARM64FNMULD {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpARM64FMSUBD)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64FADDS fuses a single-precision add with a
// single-use multiply into FMADDS/FMSUBS when the function allows FMA
// (useFMA). The inner loop tries both operand orders for the commutative
// add. It returns true if v was rewritten.
func rewriteValueARM64_OpARM64FADDS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDS a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpARM64FMULS {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpARM64FMADDS)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	// match: (FADDS a (FNMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBS a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpARM64FNMULS {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpARM64FMSUBS)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64FCMPD rewrites a double-precision compare against
// the constant 0 into the dedicated FCMPD0 form, inserting InvertFlags when
// the zero is the first operand. It returns true if v was rewritten.
func rewriteValueARM64_OpARM64FCMPD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (FCMPD x (FMOVDconst [0]))
	// result: (FCMPD0 x)
	for {
		x := v_0
		if v_1.Op != OpARM64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64FCMPD0)
		v.AddArg(x)
		return true
	}
	// match: (FCMPD (FMOVDconst [0]) x)
	// result: (InvertFlags (FCMPD0 x))
	// Zero on the left: compare x against zero, then invert the flag sense.
	for {
		if v_0.Op != OpARM64FMOVDconst || auxIntToFloat64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD0, types.TypeFlags)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FCMPS rewrites a single-precision compare against
// the constant 0 into the dedicated FCMPS0 form, inserting InvertFlags when
// the zero is the first operand. It returns true if v was rewritten.
func rewriteValueARM64_OpARM64FCMPS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (FCMPS x (FMOVSconst [0]))
	// result: (FCMPS0 x)
	for {
		x := v_0
		if v_1.Op != OpARM64FMOVSconst || auxIntToFloat64(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64FCMPS0)
		v.AddArg(x)
		return true
	}
	// match: (FCMPS (FMOVSconst [0]) x)
	// result: (InvertFlags (FCMPS0 x))
	// Zero on the left: compare x against zero, then invert the flag sense.
	for {
		if v_0.Op != OpARM64FMOVSconst || auxIntToFloat64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS0, types.TypeFlags)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDfpgp rewrites an FP->GP register move of a
// function argument into a direct Arg of the moved-to type, materialized in
// the function's entry block (the @b.Func.Entry in the rule). It returns
// true if v was rewritten.
func rewriteValueARM64_OpARM64FMOVDfpgp(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (FMOVDfpgp <t> (Arg [off] {sym}))
	// result: @b.Func.Entry (Arg <t> [off] {sym})
	for {
		t := v.Type
		if v_0.Op != OpArg {
			break
		}
		off := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		// Re-create the Arg in the entry block with the destination type.
		b = b.Func.Entry
		v0 := b.NewValue0(v.Pos, OpArg, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDgpfp rewrites a GP->FP register move of a
// function argument into a direct Arg of the moved-to type, materialized in
// the function's entry block (the @b.Func.Entry in the rule). It returns
// true if v was rewritten.
func rewriteValueARM64_OpARM64FMOVDgpfp(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (FMOVDgpfp <t> (Arg [off] {sym}))
	// result: @b.Func.Entry (Arg <t> [off] {sym})
	for {
		t := v.Type
		if v_0.Op != OpArg {
			break
		}
		off := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		// Re-create the Arg in the entry block with the destination type.
		b = b.Func.Entry
		v0 := b.NewValue0(v.Pos, OpArg, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDload applies the generated rewrite rules for
// 64-bit FP loads: forward a just-stored GP value through FMOVDgpfp, fold
// ADDconst/MOVDaddr address arithmetic into the offset (subject to 32-bit
// offset range and dynlink restrictions on SB-relative addressing), and
// convert reg+reg / reg+reg<<3 addressing into the indexed load forms. It
// returns true if v was rewritten.
func rewriteValueARM64_OpARM64FMOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVDload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
	// result: (FMOVDgpfp val)
	// Load of a value just stored at the same address: move it between
	// register files instead of going through memory.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpARM64FMOVDgpfp)
		v.AddArg(val)
		return true
	}
	// match: (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVDload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVDload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVDloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVDloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVDload [off] {sym} (ADDshiftLL [3] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVDloadidx8 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVDloadidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDloadidx applies the generated rewrite rules
// for the register-indexed 64-bit FP load: fold a constant index (either
// operand) into an offset load when it fits in 32 bits, and recognize an
// index shifted left by 3 as the scaled FMOVDloadidx8 form. It returns true
// if v was rewritten.
func rewriteValueARM64_OpARM64FMOVDloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVDloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (FMOVDload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVDloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (FMOVDload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVDloadidx ptr (SLLconst [3] idx) mem)
	// result: (FMOVDloadidx8 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64FMOVDloadidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVDloadidx (SLLconst [3] idx) ptr mem)
	// result: (FMOVDloadidx8 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64FMOVDloadidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDloadidx8 lowers the scaled form FMOVDloadidx8:
// a constant index c is folded into an FMOVDload with immediate offset c<<3,
// guarded by is32Bit(c<<3) so the scaled offset fits the 32-bit AuxInt.
// It reports whether v was rewritten.
func rewriteValueARM64_OpARM64FMOVDloadidx8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVDloadidx8 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<3)
	// result: (FMOVDload ptr [int32(c)<<3] mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 3)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = int32ToAuxInt(int32(c) << 3)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDstore optimizes FMOVDstore: storing a value
// just moved from a GP register (FMOVDgpfp) becomes a plain MOVDstore; an
// ADDconst base folds its offset into the store; an ADD or ADDshiftLL [3]
// base (with zero offset and nil sym) becomes an indexed store; and a
// MOVDaddr base merges its symbol/offset when allowed. All offset folds are
// guarded by is32Bit, and SB-based folds are disabled under Flag_dynlink.
// It reports whether v was rewritten.
func rewriteValueARM64_OpARM64FMOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVDstore [off] {sym} ptr (FMOVDgpfp val) mem)
	// result: (MOVDstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64FMOVDgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVDstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVDstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVDstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVDstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVDstore [off] {sym} (ADDshiftLL [3] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVDstoreidx8 ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVDstoreidx8)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDstoreidx lowers FMOVDstoreidx: a constant
// index (in either of the first two argument positions) folds into an
// FMOVDstore immediate offset when it fits in 32 bits, and an index shifted
// left by 3 (SLLconst [3]) becomes the scaled FMOVDstoreidx8 form.
// It reports whether v was rewritten.
func rewriteValueARM64_OpARM64FMOVDstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVDstoreidx ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (FMOVDstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVDstoreidx (MOVDconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (FMOVDstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	// match: (FMOVDstoreidx ptr (SLLconst [3] idx) val mem)
	// result: (FMOVDstoreidx8 ptr idx val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
			break
		}
		idx := v_1.Args[0]
		val := v_2
		mem := v_3
		v.reset(OpARM64FMOVDstoreidx8)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVDstoreidx (SLLconst [3] idx) ptr val mem)
	// result: (FMOVDstoreidx8 ptr idx val mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		val := v_2
		mem := v_3
		v.reset(OpARM64FMOVDstoreidx8)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDstoreidx8 lowers the scaled form
// FMOVDstoreidx8: a constant index c folds into an FMOVDstore with immediate
// offset c<<3, guarded by is32Bit(c<<3). It reports whether v was rewritten.
func rewriteValueARM64_OpARM64FMOVDstoreidx8(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVDstoreidx8 ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c<<3)
	// result: (FMOVDstore [int32(c)<<3] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c << 3)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c) << 3)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSload optimizes FMOVSload: a load that reads
// back a MOVWstore to the same [off]{sym} ptr forwards the stored value via
// FMOVSgpfp; an ADDconst base folds its offset; an ADD or ADDshiftLL [2]
// base (with zero offset and nil sym) becomes an indexed load; and a
// MOVDaddr base merges its symbol/offset when allowed. Offset folds are
// guarded by is32Bit, and SB-based folds are disabled under Flag_dynlink.
// It reports whether v was rewritten.
func rewriteValueARM64_OpARM64FMOVSload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVSload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _))
	// result: (FMOVSgpfp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpARM64FMOVSgpfp)
		v.AddArg(val)
		return true
	}
	// match: (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVSload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVSload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVSloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVSloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVSload [off] {sym} (ADDshiftLL [2] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVSloadidx4 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVSloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSloadidx lowers FMOVSloadidx: a constant index
// (in either argument position) folds into an FMOVSload immediate offset when
// it fits in 32 bits, and an index shifted left by 2 (SLLconst [2]) becomes
// the scaled FMOVSloadidx4 form. It reports whether v was rewritten.
func rewriteValueARM64_OpARM64FMOVSloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVSloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (FMOVSload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVSloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (FMOVSload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (FMOVSloadidx ptr (SLLconst [2] idx) mem)
	// result: (FMOVSloadidx4 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64FMOVSloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (FMOVSloadidx (SLLconst [2] idx) ptr mem)
	// result: (FMOVSloadidx4 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64FMOVSloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSloadidx4 lowers the scaled form FMOVSloadidx4:
// a constant index c folds into an FMOVSload with immediate offset c<<2,
// guarded by is32Bit(c<<2). It reports whether v was rewritten.
func rewriteValueARM64_OpARM64FMOVSloadidx4(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVSloadidx4 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<2)
	// result: (FMOVSload ptr [int32(c)<<2] mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 2)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = int32ToAuxInt(int32(c) << 2)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSstore optimizes FMOVSstore: storing a value
// just moved from a GP register (FMOVSgpfp) becomes a plain MOVWstore; an
// ADDconst base folds its offset into the store; an ADD or ADDshiftLL [2]
// base (with zero offset and nil sym) becomes an indexed store; and a
// MOVDaddr base merges its symbol/offset when allowed. Offset folds are
// guarded by is32Bit, and SB-based folds are disabled under Flag_dynlink.
// It reports whether v was rewritten.
func rewriteValueARM64_OpARM64FMOVSstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVSstore [off] {sym} ptr (FMOVSgpfp val) mem)
	// result: (MOVWstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64FMOVSgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVSstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVSstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVSstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVSstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVSstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVSstore [off] {sym} (ADDshiftLL [2] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVSstoreidx4 ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVSstoreidx4)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSstoreidx lowers FMOVSstoreidx: a constant
// index (in either of the first two argument positions) folds into an
// FMOVSstore immediate offset when it fits in 32 bits, and an index shifted
// left by 2 (SLLconst [2]) becomes the scaled FMOVSstoreidx4 form.
// It reports whether v was rewritten.
func rewriteValueARM64_OpARM64FMOVSstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVSstoreidx ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (FMOVSstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (FMOVSstoreidx (MOVDconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (FMOVSstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	// match: (FMOVSstoreidx ptr (SLLconst [2] idx) val mem)
	// result: (FMOVSstoreidx4 ptr idx val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
			break
		}
		idx := v_1.Args[0]
		val := v_2
		mem := v_3
		v.reset(OpARM64FMOVSstoreidx4)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (FMOVSstoreidx (SLLconst [2] idx) ptr val mem)
	// result: (FMOVSstoreidx4 ptr idx val mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		val := v_2
		mem := v_3
		v.reset(OpARM64FMOVSstoreidx4)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSstoreidx4 lowers the scaled form
// FMOVSstoreidx4: a constant index c folds into an FMOVSstore with immediate
// offset c<<2, guarded by is32Bit(c<<2). It reports whether v was rewritten.
func rewriteValueARM64_OpARM64FMOVSstoreidx4(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMOVSstoreidx4 ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c<<2)
	// result: (FMOVSstore [int32(c)<<2] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c << 2)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = int32ToAuxInt(int32(c) << 2)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMULD folds a negation into the multiply:
// (FMULD (FNEGD x) y) becomes (FNMULD x y). The inner loop tries both
// argument orders since FMULD is commutative. It reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64FMULD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMULD (FNEGD x) y)
	// result: (FNMULD x y)
	for {
		// Swap v_0/v_1 on the second pass to cover the commuted match.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64FNEGD {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			v.reset(OpARM64FNMULD)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64FMULS folds a negation into the multiply:
// (FMULS (FNEGS x) y) becomes (FNMULS x y). The inner loop tries both
// argument orders since FMULS is commutative. It reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64FMULS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMULS (FNEGS x) y)
	// result: (FNMULS x y)
	for {
		// Swap v_0/v_1 on the second pass to cover the commuted match.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64FNEGS {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			v.reset(OpARM64FNMULS)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64FNEGD folds a negation of a multiply:
// (FNEGD (FMULD x y)) becomes (FNMULD x y) and (FNEGD (FNMULD x y)) becomes
// (FMULD x y), eliminating the separate negate. It reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64FNEGD(v *Value) bool {
	v_0 := v.Args[0]
	// match: (FNEGD (FMULD x y))
	// result: (FNMULD x y)
	for {
		if v_0.Op != OpARM64FMULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64FNMULD)
		v.AddArg2(x, y)
		return true
	}
	// match: (FNEGD (FNMULD x y))
	// result: (FMULD x y)
	for {
		if v_0.Op != OpARM64FNMULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64FMULD)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FNEGS folds a negation of a multiply:
// (FNEGS (FMULS x y)) becomes (FNMULS x y) and (FNEGS (FNMULS x y)) becomes
// (FMULS x y), eliminating the separate negate. It reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64FNEGS(v *Value) bool {
	v_0 := v.Args[0]
	// match: (FNEGS (FMULS x y))
	// result: (FNMULS x y)
	for {
		if v_0.Op != OpARM64FMULS {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64FNMULS)
		v.AddArg2(x, y)
		return true
	}
	// match: (FNEGS (FNMULS x y))
	// result: (FMULS x y)
	for {
		if v_0.Op != OpARM64FNMULS {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64FMULS)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FNMULD cancels a double negation:
// (FNMULD (FNEGD x) y) becomes (FMULD x y). The inner loop tries both
// argument orders since FNMULD is commutative. It reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64FNMULD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMULD (FNEGD x) y)
	// result: (FMULD x y)
	for {
		// Swap v_0/v_1 on the second pass to cover the commuted match.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64FNEGD {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			v.reset(OpARM64FMULD)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64FNMULS cancels a double negation:
// (FNMULS (FNEGS x) y) becomes (FMULS x y). The inner loop tries both
// argument orders since FNMULS is commutative. It reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64FNMULS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMULS (FNEGS x) y)
	// result: (FMULS x y)
	for {
		// Swap v_0/v_1 on the second pass to cover the commuted match.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64FNEGS {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			v.reset(OpARM64FMULS)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64FSUBD fuses a subtract with a multiply into one of
// the ARM64 fused multiply-add forms, gated on useFMA (fusion changes rounding,
// so it is only done when the function permits FMA):
//
//	a - (x*y)  -> FMSUBD  a x y
//	(x*y) - a  -> FNMSUBD a x y
//	a - (-x*y) -> FMADDD  a x y
//	(-x*y) - a -> FNMADDD a x y
//
// It reports whether v was rewritten.
func rewriteValueARM64_OpARM64FSUBD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FSUBD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBD a x y)
	for {
		a := v_0
		if v_1.Op != OpARM64FMULD {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpARM64FMSUBD)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (FSUBD (FMULD x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMSUBD a x y)
	for {
		if v_0.Op != OpARM64FMULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpARM64FNMSUBD)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (FSUBD a (FNMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDD a x y)
	for {
		a := v_0
		if v_1.Op != OpARM64FNMULD {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpARM64FMADDD)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (FSUBD (FNMULD x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMADDD a x y)
	for {
		if v_0.Op != OpARM64FNMULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpARM64FNMADDD)
		v.AddArg3(a, x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FSUBS fuses a subtract with a multiply into one of
// the ARM64 fused multiply-add forms, gated on useFMA (fusion changes rounding,
// so it is only done when the function permits FMA):
//
//	a - (x*y)  -> FMSUBS  a x y
//	(x*y) - a  -> FNMSUBS a x y
//	a - (-x*y) -> FMADDS  a x y
//	(-x*y) - a -> FNMADDS a x y
//
// It reports whether v was rewritten.
func rewriteValueARM64_OpARM64FSUBS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FSUBS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBS a x y)
	for {
		a := v_0
		if v_1.Op != OpARM64FMULS {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpARM64FMSUBS)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (FSUBS (FMULS x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMSUBS a x y)
	for {
		if v_0.Op != OpARM64FMULS {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpARM64FNMSUBS)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (FSUBS a (FNMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDS a x y)
	for {
		a := v_0
		if v_1.Op != OpARM64FNMULS {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpARM64FMADDS)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (FSUBS (FNMULS x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMADDS a x y)
	for {
		if v_0.Op != OpARM64FNMULS {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpARM64FNMADDS)
		v.AddArg3(a, x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterEqual simplifies the GreaterEqual
// pseudo-boolean when its flags argument has a known cheaper form:
// comparisons of an AND/ANDconst result against 0 become TST-family tests;
// comparisons of an ADD/ADDconst/MADD/MSUB result against 0 become
// GreaterEqualNoov over CMN/CMP forms (signed >= 0 of a sum needs the
// overflow-ignoring variant); a FlagConstant collapses to a MOVDconst of
// b2i(fc.ge()); and InvertFlags turns GreaterEqual into LessEqual.
// Single-use guards (x.Uses == 1 / z.Uses == 1) keep the folded value from
// being materialized twice. It reports whether v was rewritten.
func rewriteValueARM64_OpARM64GreaterEqual(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (GreaterEqual (CMPconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (GreaterEqual (TST x y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (GreaterEqual (CMPWconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (GreaterEqual (TSTWconst [int32(c)] y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (GreaterEqual (CMPWconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (GreaterEqual (TSTW x y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (GreaterEqual (CMPconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (GreaterEqual (TSTconst [c] y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (GreaterEqual (CMPconst [0] x:(ADDconst [c] y)))
	// cond: x.Uses == 1
	// result: (GreaterEqualNoov (CMNconst [c] y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ADDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64GreaterEqualNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (GreaterEqual (CMPWconst [0] x:(ADDconst [c] y)))
	// cond: x.Uses == 1
	// result: (GreaterEqualNoov (CMNWconst [int32(c)] y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ADDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64GreaterEqualNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (GreaterEqual (CMPconst [0] z:(ADD x y)))
	// cond: z.Uses == 1
	// result: (GreaterEqualNoov (CMN x y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64ADD {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64GreaterEqualNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (GreaterEqual (CMPWconst [0] z:(ADD x y)))
	// cond: z.Uses == 1
	// result: (GreaterEqualNoov (CMNW x y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64ADD {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64GreaterEqualNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (GreaterEqual (CMPconst [0] z:(MADD a x y)))
	// cond: z.Uses == 1
	// result: (GreaterEqualNoov (CMN a (MUL <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MADD {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64GreaterEqualNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (GreaterEqual (CMPconst [0] z:(MSUB a x y)))
	// cond: z.Uses == 1
	// result: (GreaterEqualNoov (CMP a (MUL <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MSUB {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64GreaterEqualNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (GreaterEqual (CMPWconst [0] z:(MADDW a x y)))
	// cond: z.Uses == 1
	// result: (GreaterEqualNoov (CMNW a (MULW <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MADDW {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64GreaterEqualNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (GreaterEqual (CMPWconst [0] z:(MSUBW a x y)))
	// cond: z.Uses == 1
	// result: (GreaterEqualNoov (CMPW a (MULW <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MSUBW {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64GreaterEqualNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (GreaterEqual (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.ge())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.ge()))
		return true
	}
	// match: (GreaterEqual (InvertFlags x))
	// result: (LessEqual x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessEqual)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterEqualF applies the generated rewrite rules
// for GreaterEqualF values. It reports whether a rewrite was performed.
// Like every rulegen-produced rewriter in this file, each for{} block is one
// attempted rule: break means "rule did not match, try the next one".
func rewriteValueARM64_OpARM64GreaterEqualF(v *Value) bool {
	v_0 := v.Args[0]
	// match: (GreaterEqualF (InvertFlags x))
	// result: (LessEqualF x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		// Swapping the compared operands (InvertFlags) turns >= into <=.
		v.reset(OpARM64LessEqualF)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterEqualNoov applies the generated rewrite
// rules for GreaterEqualNoov ("greater-or-equal assuming no overflow") values.
// It reports whether a rewrite was performed.
func rewriteValueARM64_OpARM64GreaterEqualNoov(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (GreaterEqualNoov (InvertFlags x))
	// result: (CSINC [OpARM64NotEqual] (LessThanNoov <typ.Bool> x) (MOVDconst [0]) x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64CSINC)
		// The auxint of CSINC selects the condition under which the first
		// operand (rather than second-operand+1) is chosen.
		v.AuxInt = opToAuxInt(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64LessThanNoov, typ.Bool)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg3(v0, v1, x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterEqualU applies the generated rewrite rules
// for GreaterEqualU (unsigned >=) values. It reports whether a rewrite was
// performed.
func rewriteValueARM64_OpARM64GreaterEqualU(v *Value) bool {
	v_0 := v.Args[0]
	// match: (GreaterEqualU (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.uge())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		// Flags are a compile-time constant: fold the comparison to 0 or 1.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.uge()))
		return true
	}
	// match: (GreaterEqualU (InvertFlags x))
	// result: (LessEqualU x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessEqualU)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterThan applies the generated rewrite rules for
// GreaterThan (signed >) values. It reports whether a rewrite was performed.
// The CMP/CMPW-of-AND rules replace a compare-against-zero of a single-use AND
// with the flag-setting TST family, eliminating the materialized AND result.
func rewriteValueARM64_OpARM64GreaterThan(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (GreaterThan (CMPconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (GreaterThan (TST x y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		// Only fire when the AND has no other consumers, so it can be folded away.
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (GreaterThan (CMPWconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (GreaterThan (TSTWconst [int32(c)] y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (GreaterThan (CMPWconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (GreaterThan (TSTW x y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (GreaterThan (CMPconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (GreaterThan (TSTconst [c] y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (GreaterThan (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.gt())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		// Flags known at compile time: fold to constant 0/1.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.gt()))
		return true
	}
	// match: (GreaterThan (InvertFlags x))
	// result: (LessThan x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessThan)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterThanF applies the generated rewrite rules
// for GreaterThanF (floating-point >) values. It reports whether a rewrite
// was performed.
func rewriteValueARM64_OpARM64GreaterThanF(v *Value) bool {
	v_0 := v.Args[0]
	// match: (GreaterThanF (InvertFlags x))
	// result: (LessThanF x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessThanF)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterThanU applies the generated rewrite rules
// for GreaterThanU (unsigned >) values. It reports whether a rewrite was
// performed.
func rewriteValueARM64_OpARM64GreaterThanU(v *Value) bool {
	v_0 := v.Args[0]
	// match: (GreaterThanU (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.ugt())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		// Flags known at compile time: fold to constant 0/1.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.ugt()))
		return true
	}
	// match: (GreaterThanU (InvertFlags x))
	// result: (LessThanU x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessThanU)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64LDP applies the generated rewrite rules for LDP
// (load-pair) values, folding address arithmetic into the instruction's
// offset/symbol where the resulting offset still fits. It reports whether a
// rewrite was performed.
func rewriteValueARM64_OpARM64LDP(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (LDP [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (LDP [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		// Combined offset must stay in 32 bits; SB-relative folding is
		// disallowed under dynamic linking.
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64LDP)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (LDP [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (LDP [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64LDP)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64LessEqual applies the generated rewrite rules for
// LessEqual (signed <=) values. It reports whether a rewrite was performed.
// The CMP/CMPW-of-AND rules replace a compare-against-zero of a single-use AND
// with the flag-setting TST family, eliminating the materialized AND result.
func rewriteValueARM64_OpARM64LessEqual(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (LessEqual (CMPconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (LessEqual (TST x y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		// Only fire when the AND has no other consumers, so it can be folded away.
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (LessEqual (CMPWconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (LessEqual (TSTWconst [int32(c)] y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (LessEqual (CMPWconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (LessEqual (TSTW x y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (LessEqual (CMPconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (LessEqual (TSTconst [c] y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (LessEqual (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.le())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		// Flags known at compile time: fold to constant 0/1.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.le()))
		return true
	}
	// match: (LessEqual (InvertFlags x))
	// result: (GreaterEqual x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterEqual)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64LessEqualF applies the generated rewrite rules for
// LessEqualF (floating-point <=) values. It reports whether a rewrite was
// performed.
func rewriteValueARM64_OpARM64LessEqualF(v *Value) bool {
	v_0 := v.Args[0]
	// match: (LessEqualF (InvertFlags x))
	// result: (GreaterEqualF x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterEqualF)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64LessEqualU applies the generated rewrite rules for
// LessEqualU (unsigned <=) values. It reports whether a rewrite was performed.
func rewriteValueARM64_OpARM64LessEqualU(v *Value) bool {
	v_0 := v.Args[0]
	// match: (LessEqualU (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.ule())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		// Flags known at compile time: fold to constant 0/1.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.ule()))
		return true
	}
	// match: (LessEqualU (InvertFlags x))
	// result: (GreaterEqualU x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterEqualU)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64LessThan applies the generated rewrite rules for
// LessThan (signed <) values. It reports whether a rewrite was performed.
//
// Rule groups, tried in order:
//   - compare-against-zero of a single-use AND/ANDconst -> flag-setting TST family;
//   - compare-against-zero of a single-use ADD/ADDconst/MADD/MSUB (and W forms)
//     -> flag-setting CMN/CMP forms combined with LessThanNoov, which is valid
//     because the replacement flag ops do not track overflow the same way;
//   - constant-flag folding and InvertFlags canonicalization.
func rewriteValueARM64_OpARM64LessThan(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (LessThan (CMPconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (LessThan (TST x y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		// Only fire when the AND has no other consumers, so it can be folded away.
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPWconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (LessThan (TSTWconst [int32(c)] y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPWconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (LessThan (TSTW x y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (LessThan (TSTconst [c] y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPconst [0] x:(ADDconst [c] y)))
	// cond: x.Uses == 1
	// result: (LessThanNoov (CMNconst [c] y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ADDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThanNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPWconst [0] x:(ADDconst [c] y)))
	// cond: x.Uses == 1
	// result: (LessThanNoov (CMNWconst [int32(c)] y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ADDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThanNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPconst [0] z:(ADD x y)))
	// cond: z.Uses == 1
	// result: (LessThanNoov (CMN x y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64ADD {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThanNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPWconst [0] z:(ADD x y)))
	// cond: z.Uses == 1
	// result: (LessThanNoov (CMNW x y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64ADD {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThanNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPconst [0] z:(MADD a x y)))
	// cond: z.Uses == 1
	// result: (LessThanNoov (CMN a (MUL <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MADD {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThanNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPconst [0] z:(MSUB a x y)))
	// cond: z.Uses == 1
	// result: (LessThanNoov (CMP a (MUL <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MSUB {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThanNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPWconst [0] z:(MADDW a x y)))
	// cond: z.Uses == 1
	// result: (LessThanNoov (CMNW a (MULW <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MADDW {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThanNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (CMPWconst [0] z:(MSUBW a x y)))
	// cond: z.Uses == 1
	// result: (LessThanNoov (CMPW a (MULW <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MSUBW {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64LessThanNoov)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (LessThan (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.lt())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		// Flags known at compile time: fold to constant 0/1.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.lt()))
		return true
	}
	// match: (LessThan (InvertFlags x))
	// result: (GreaterThan x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterThan)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64LessThanF applies the generated rewrite rules for
// LessThanF (floating-point <) values. It reports whether a rewrite was
// performed.
func rewriteValueARM64_OpARM64LessThanF(v *Value) bool {
	v_0 := v.Args[0]
	// match: (LessThanF (InvertFlags x))
	// result: (GreaterThanF x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterThanF)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64LessThanNoov applies the generated rewrite rules
// for LessThanNoov ("less-than assuming no overflow") values. It reports
// whether a rewrite was performed.
func rewriteValueARM64_OpARM64LessThanNoov(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (LessThanNoov (InvertFlags x))
	// result: (CSEL0 [OpARM64NotEqual] (GreaterEqualNoov <typ.Bool> x) x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64CSEL0)
		// CSEL0's auxint holds the condition op selecting the first operand
		// (otherwise the result is zero).
		v.AuxInt = opToAuxInt(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64GreaterEqualNoov, typ.Bool)
		v0.AddArg(x)
		v.AddArg2(v0, x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64LessThanU applies the generated rewrite rules for
// LessThanU (unsigned <) values. It reports whether a rewrite was performed.
func rewriteValueARM64_OpARM64LessThanU(v *Value) bool {
	v_0 := v.Args[0]
	// match: (LessThanU (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.ult())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		// Flags known at compile time: fold to constant 0/1.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.ult()))
		return true
	}
	// match: (LessThanU (InvertFlags x))
	// result: (GreaterThanU x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterThanU)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MADD applies the generated rewrite rules for MADD
// (a + x*y) values, mostly strength-reducing multiplications by constants
// into shift-and-add/sub sequences. It reports whether a rewrite was
// performed. Rules are tried in order: constant in the third operand, then
// constant in the second operand (the symmetric cases), then fully-constant
// folds.
func rewriteValueARM64_OpARM64MADD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (MADD a x (MOVDconst [-1]))
	// result: (SUB a x)
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != -1 {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a _ (MOVDconst [0]))
	// result: a
	for {
		a := v_0
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		v.copyOf(a)
		return true
	}
	// match: (MADD a x (MOVDconst [1]))
	// result: (ADD a x)
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 1 {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (ADDshiftLL a x [log64(c)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		// x*2^k == x<<k, folded into the add.
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && c>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log64(c-1)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c-1) && c >= 3) {
			break
		}
		// x*(2^k+1) == x + x<<k.
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c - 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && c>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log64(c+1)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c+1) && c >= 7) {
			break
		}
		// x*(2^k-1) == x<<k - x; note SUBshiftLL computes x - x<<k, hence the outer SUB.
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c + 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 3))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 5))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 7))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)])
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 9))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [-1]) x)
	// result: (SUB a x)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
			break
		}
		x := v_2
		v.reset(OpARM64SUB)
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a (MOVDconst [0]) _)
	// result: a
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(a)
		return true
	}
	// match: (MADD a (MOVDconst [1]) x)
	// result: (ADD a x)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		x := v_2
		v.reset(OpARM64ADD)
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c)
	// result: (ADDshiftLL a x [log64(c)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg2(a, x)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c-1) && c>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log64(c-1)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c - 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c+1) && c>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log64(c+1)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(log64(c + 1))
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo64(c/3)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 3))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo64(c/5)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 5))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(2)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo64(c/7)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 7))
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo64(c/9)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)])
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = int64ToAuxInt(log64(c / 9))
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = int64ToAuxInt(3)
		v0.AddArg2(x, x)
		v.AddArg2(a, v0)
		return true
	}
	// match: (MADD (MOVDconst [c]) x y)
	// result: (ADDconst [c] (MUL <x.Type> x y))
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		y := v_2
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) (MOVDconst [d]))
	// result: (ADDconst [c*d] a)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_2.AuxInt)
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(c * d)
		v.AddArg(a)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MADDW rewrites the ARM64 MADDW op (32-bit
// multiply-add: a + x*y) when one multiplier operand is a constant,
// replacing the multiply with cheaper shift/add/sub sequences. Results are
// wrapped in MOVWUreg, which models the 32-bit width of the MADDW result.
// Rules are tried in order and the first match wins; the same set of
// strength-reduction rules is emitted twice, once for a constant in the
// third argument and once for a constant in the second. Generated from
// _gen/ARM64.rules — the rule being applied is quoted above each loop.
// Returns true if v was rewritten.
func rewriteValueARM64_OpARM64MADDW(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (MADDW a x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: (MOVWUreg (SUB <a.Type> a x))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUB, a.Type)
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: (MOVWUreg a)
	for {
		a := v_0
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v.AddArg(a)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (MOVWUreg (ADD <a.Type> a x))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, a.Type)
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (MOVWUreg (ADDshiftLL <a.Type> a x [log64(c)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c))
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && int32(c)>=3
	// result: (MOVWUreg (ADD <a.Type> a (ADDshiftLL <x.Type> x x [log64(c-1)])))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, a.Type)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(log64(c - 1))
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && int32(c)>=7
	// result: (MOVWUreg (SUB <a.Type> a (SUBshiftLL <x.Type> x x [log64(c+1)])))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUB, a.Type)
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(log64(c + 1))
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)
	// result: (MOVWUreg (SUBshiftLL <a.Type> a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 3))
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(2)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)
	// result: (MOVWUreg (ADDshiftLL <a.Type> a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 5))
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(2)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)
	// result: (MOVWUreg (SUBshiftLL <a.Type> a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 7))
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(3)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)
	// result: (MOVWUreg (ADDshiftLL <a.Type> a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 9))
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(3)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// Same strength-reduction rules, with the constant in the second argument.
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: int32(c)==-1
	// result: (MOVWUreg (SUB <a.Type> a x))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUB, a.Type)
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) _)
	// cond: int32(c)==0
	// result: (MOVWUreg a)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v.AddArg(a)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: int32(c)==1
	// result: (MOVWUreg (ADD <a.Type> a x))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, a.Type)
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c)
	// result: (MOVWUreg (ADDshiftLL <a.Type> a x [log64(c)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c))
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c-1) && int32(c)>=3
	// result: (MOVWUreg (ADD <a.Type> a (ADDshiftLL <x.Type> x x [log64(c-1)])))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, a.Type)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(log64(c - 1))
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c+1) && int32(c)>=7
	// result: (MOVWUreg (SUB <a.Type> a (SUBshiftLL <x.Type> x x [log64(c+1)])))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUB, a.Type)
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(log64(c + 1))
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)
	// result: (MOVWUreg (SUBshiftLL <a.Type> a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 3))
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(2)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)
	// result: (MOVWUreg (ADDshiftLL <a.Type> a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 5))
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(2)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)
	// result: (MOVWUreg (SUBshiftLL <a.Type> a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 7))
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(3)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)
	// result: (MOVWUreg (ADDshiftLL <a.Type> a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 9))
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(3)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW (MOVDconst [c]) x y)
	// result: (MOVWUreg (ADDconst <x.Type> [c] (MULW <x.Type> x y)))
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		y := v_2
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, x.Type)
		v0.AuxInt = int64ToAuxInt(c)
		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) (MOVDconst [d]))
	// result: (MOVWUreg (ADDconst <a.Type> [c*d] a))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_2.AuxInt)
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, a.Type)
		v0.AuxInt = int64ToAuxInt(c * d)
		v0.AddArg(a)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MNEG rewrites the ARM64 MNEG op (negated
// multiply: -(x*y)) when one operand is a constant, replacing the multiply
// with shift/add/sub/neg sequences or folding two constants entirely.
// MNEG is commutative, so each rule's inner _i0 loop tries both argument
// orders by swapping v_0 and v_1 on the second iteration. Rules are tried
// in order; the first match wins. Generated from _gen/ARM64.rules — the
// rule being applied is quoted above each loop. Returns true if v was
// rewritten.
func rewriteValueARM64_OpARM64MNEG(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (MNEG x (MOVDconst [-1]))
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (MNEG _ (MOVDconst [0]))
	// result: (MOVDconst [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
				continue
			}
			v.reset(OpARM64MOVDconst)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (MNEG x (MOVDconst [1]))
	// result: (NEG x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
				continue
			}
			v.reset(OpARM64NEG)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (NEG (SLLconst <x.Type> [log64(c)] x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c)) {
				continue
			}
			v.reset(OpARM64NEG)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c))
			v0.AddArg(x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && c >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log64(c-1)]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c-1) && c >= 3) {
				continue
			}
			v.reset(OpARM64NEG)
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c - 1))
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && c >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log64(c+1)]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c+1) && c >= 7) {
				continue
			}
			v.reset(OpARM64NEG)
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c + 1))
			v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v1.AddArg(x)
			v0.AddArg2(v1, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3)
	// result: (SLLconst <x.Type> [log64(c/3)] (SUBshiftLL <x.Type> x x [2]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.Type = x.Type
			v.AuxInt = int64ToAuxInt(log64(c / 3))
			v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(2)
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5)
	// result: (NEG (SLLconst <x.Type> [log64(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
				continue
			}
			v.reset(OpARM64NEG)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c / 5))
			v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v1.AuxInt = int64ToAuxInt(2)
			v1.AddArg2(x, x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7)
	// result: (SLLconst <x.Type> [log64(c/7)] (SUBshiftLL <x.Type> x x [3]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.Type = x.Type
			v.AuxInt = int64ToAuxInt(log64(c / 7))
			v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(3)
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9)
	// result: (NEG (SLLconst <x.Type> [log64(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
				continue
			}
			v.reset(OpARM64NEG)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c / 9))
			v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v1.AuxInt = int64ToAuxInt(3)
			v1.AddArg2(x, x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEG (MOVDconst [c]) (MOVDconst [d]))
	// result: (MOVDconst [-c*d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64MOVDconst)
			v.AuxInt = int64ToAuxInt(-c * d)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64MNEGW rewrites the ARM64 MNEGW op (32-bit
// negated multiply) when one operand is a constant. Conditions test
// int32(c), and most results are wrapped in MOVWUreg, modelling the 32-bit
// width of the MNEGW result; the constant-constant fold likewise truncates
// through uint32. MNEGW is commutative, so each rule's inner _i0 loop
// tries both argument orders by swapping v_0 and v_1. Rules are tried in
// order; the first match wins. Generated from _gen/ARM64.rules — the rule
// being applied is quoted above each loop. Returns true if v was
// rewritten.
func rewriteValueARM64_OpARM64MNEGW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (MNEGW x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: (MOVWUreg x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(int32(c) == -1) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (MNEGW _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(int32(c) == 0) {
				continue
			}
			v.reset(OpARM64MOVDconst)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (MOVWUreg (NEG <x.Type> x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(int32(c) == 1) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v0.AddArg(x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (NEG (SLLconst <x.Type> [log64(c)] x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c)) {
				continue
			}
			v.reset(OpARM64NEG)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c))
			v0.AddArg(x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && int32(c) >= 3
	// result: (MOVWUreg (NEG <x.Type> (ADDshiftLL <x.Type> x x [log64(c-1)])))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v1.AuxInt = int64ToAuxInt(log64(c - 1))
			v1.AddArg2(x, x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && int32(c) >= 7
	// result: (MOVWUreg (NEG <x.Type> (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log64(c+1)])))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v1.AuxInt = int64ToAuxInt(log64(c + 1))
			v2 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v2.AddArg(x)
			v1.AddArg2(v2, x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)
	// result: (MOVWUreg (SLLconst <x.Type> [log64(c/3)] (SUBshiftLL <x.Type> x x [2])))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c / 3))
			v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
			v1.AuxInt = int64ToAuxInt(2)
			v1.AddArg2(x, x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)
	// result: (MOVWUreg (NEG <x.Type> (SLLconst <x.Type> [log64(c/5)] (ADDshiftLL <x.Type> x x [2]))))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v1.AuxInt = int64ToAuxInt(log64(c / 5))
			v2 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v2.AuxInt = int64ToAuxInt(2)
			v2.AddArg2(x, x)
			v1.AddArg(v2)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)
	// result: (MOVWUreg (SLLconst <x.Type> [log64(c/7)] (SUBshiftLL <x.Type> x x [3])))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c / 7))
			v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
			v1.AuxInt = int64ToAuxInt(3)
			v1.AddArg2(x, x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)
	// result: (MOVWUreg (NEG <x.Type> (SLLconst <x.Type> [log64(c/9)] (ADDshiftLL <x.Type> x x [3]))))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v1.AuxInt = int64ToAuxInt(log64(c / 9))
			v2 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v2.AuxInt = int64ToAuxInt(3)
			v2.AddArg2(x, x)
			v1.AddArg(v2)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MNEGW (MOVDconst [c]) (MOVDconst [d]))
	// result: (MOVDconst [int64(uint32(-c*d))])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64MOVDconst)
			v.AuxInt = int64ToAuxInt(int64(uint32(-c * d)))
			return true
		}
		break
	}
	return false
}
8118func rewriteValueARM64_OpARM64MOD(v *Value) bool {
8119	v_1 := v.Args[1]
8120	v_0 := v.Args[0]
8121	// match: (MOD (MOVDconst [c]) (MOVDconst [d]))
8122	// cond: d != 0
8123	// result: (MOVDconst [c%d])
8124	for {
8125		if v_0.Op != OpARM64MOVDconst {
8126			break
8127		}
8128		c := auxIntToInt64(v_0.AuxInt)
8129		if v_1.Op != OpARM64MOVDconst {
8130			break
8131		}
8132		d := auxIntToInt64(v_1.AuxInt)
8133		if !(d != 0) {
8134			break
8135		}
8136		v.reset(OpARM64MOVDconst)
8137		v.AuxInt = int64ToAuxInt(c % d)
8138		return true
8139	}
8140	return false
8141}
8142func rewriteValueARM64_OpARM64MODW(v *Value) bool {
8143	v_1 := v.Args[1]
8144	v_0 := v.Args[0]
8145	// match: (MODW (MOVDconst [c]) (MOVDconst [d]))
8146	// cond: d != 0
8147	// result: (MOVDconst [int64(uint32(int32(c)%int32(d)))])
8148	for {
8149		if v_0.Op != OpARM64MOVDconst {
8150			break
8151		}
8152		c := auxIntToInt64(v_0.AuxInt)
8153		if v_1.Op != OpARM64MOVDconst {
8154			break
8155		}
8156		d := auxIntToInt64(v_1.AuxInt)
8157		if !(d != 0) {
8158			break
8159		}
8160		v.reset(OpARM64MOVDconst)
8161		v.AuxInt = int64ToAuxInt(int64(uint32(int32(c) % int32(d))))
8162		return true
8163	}
8164	return false
8165}
// rewriteValueARM64_OpARM64MOVBUload rewrites the ARM64 MOVBUload op
// (zero-extending byte load). In order, the rules: fold an ADDconst base
// into the load offset; turn an (ADD ptr idx) base with zero offset/sym
// into the indexed form MOVBUloadidx; merge a MOVDaddr base's offset and
// symbol into the load; replace a load that reads back a just-stored zero
// byte (same sym, offset, and pointer) with the constant 0; and fold a
// load from a read-only symbol (base SB) into the constant byte read at
// compile time via read8. The Flag_dynlink checks avoid folding SB-based
// addresses when dynamic linking forbids it. Generated from
// _gen/ARM64.rules — the rule being applied is quoted above each loop.
// Returns true if v was rewritten.
func rewriteValueARM64_OpARM64MOVBUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBUloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVBUloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVBstorezero {
			break
		}
		off2 := auxIntToInt32(v_1.AuxInt)
		sym2 := auxToSym(v_1.Aux)
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVBUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVDconst [int64(read8(sym, int64(off)))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(read8(sym, int64(off))))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBUloadidx applies the generated rewrite rules
// for OpARM64MOVBUloadidx (register-indexed unsigned byte load). Each for
// block attempts one rule, described by its // match, // cond and // result
// comments; the first rule whose pattern and condition hold rewrites v in
// place and returns true. Returns false when no rule applies.
func rewriteValueARM64_OpARM64MOVBUloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBUloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVBUload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVBUload [int32(c)] ptr mem)
	// Mirror of the previous rule: index addition is commutative, so a
	// constant in the first argument folds the same way.
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUloadidx ptr idx (MOVBstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	// A load from an address that the incoming memory state just zeroed
	// (with either argument order) is the constant 0.
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVBstorezeroidx {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBUreg applies the generated rewrite rules for
// OpARM64MOVBUreg (zero-extend byte to 64 bits). Each for block attempts one
// rule, described by its // match, // cond and // result comments; the first
// rule that applies rewrites v in place and returns true. Returns false when
// no rule applies.
func rewriteValueARM64_OpARM64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBUreg (ANDconst [c] x))
	// result: (ANDconst [c&(1<<8-1)] x)
	// Zero-extension of an AND result folds into the AND mask.
	for {
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c & (1<<8 - 1))
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint8(c))])
	// Constant-fold: keep only the low 8 bits, zero-extended.
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		return true
	}
	// match: (MOVBUreg x)
	// cond: v.Type.Size() <= 1
	// result: x
	// The extension is a no-op if only a byte (or less) of the result is used.
	for {
		x := v_0
		if !(v.Type.Size() <= 1) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg (SLLconst [lc] x))
	// cond: lc >= 8
	// result: (MOVDconst [0])
	// Shifting left by 8 or more leaves nothing in the low byte.
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		if !(lc >= 8) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVBUreg (SLLconst [lc] x))
	// cond: lc < 8
	// result: (UBFIZ [armBFAuxInt(lc, 8-lc)] x)
	// Shift-then-zero-extend becomes a single unsigned bitfield insert.
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc < 8) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 8-lc))
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (SRLconst [rc] x))
	// cond: rc < 8
	// result: (UBFX [armBFAuxInt(rc, 8)] x)
	// Shift-right-then-zero-extend becomes a single unsigned bitfield extract.
	for {
		if v_0.Op != OpARM64SRLconst {
			break
		}
		rc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(rc < 8) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 8))
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (UBFX [bfc] x))
	// cond: bfc.getARM64BFwidth() <= 8
	// result: (UBFX [bfc] x)
	// A bitfield extract of at most 8 bits is already byte-zero-extended.
	for {
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(bfc.getARM64BFwidth() <= 8) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(bfc)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBload applies the generated rewrite rules for
// OpARM64MOVBload (signed byte load). Each for block attempts one rule,
// described by its // match, // cond and // result comments; the first rule
// that applies rewrites v in place and returns true. Returns false when no
// rule applies.
func rewriteValueARM64_OpARM64MOVBload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBload [off1+int32(off2)] {sym} ptr mem)
	// Fold a constant address addition into the load's offset.
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBloadidx ptr idx mem)
	// Use the register-indexed addressing mode when there is no offset/symbol.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVBloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	// Fold a symbolic address computation into the load.
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	// Loading a byte the incoming memory state just zeroed yields 0.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVBstorezero {
			break
		}
		off2 := auxIntToInt32(v_1.AuxInt)
		sym2 := auxToSym(v_1.Aux)
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBloadidx applies the generated rewrite rules
// for OpARM64MOVBloadidx (register-indexed signed byte load). Each for block
// attempts one rule, described by its // match, // cond and // result
// comments; the first rule that applies rewrites v in place and returns true.
// Returns false when no rule applies.
func rewriteValueARM64_OpARM64MOVBloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVBload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVBload [int32(c)] ptr mem)
	// Mirror of the previous rule for a constant in the first argument.
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBloadidx ptr idx (MOVBstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	// A load from an address the incoming memory state just zeroed (with
	// either argument order) is the constant 0.
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVBstorezeroidx {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBreg applies the generated rewrite rules for
// OpARM64MOVBreg (sign-extend byte to 64 bits). Each for block attempts one
// rule, described by its // match, // cond and // result comments; the first
// rule that applies rewrites v in place and returns true. Returns false when
// no rule applies.
func rewriteValueARM64_OpARM64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int8(c))])
	// Constant-fold: sign-extend the low 8 bits.
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int8(c)))
		return true
	}
	// match: (MOVBreg x)
	// cond: v.Type.Size() <= 1
	// result: x
	// The extension is a no-op if only a byte (or less) of the result is used.
	for {
		x := v_0
		if !(v.Type.Size() <= 1) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBreg <t> (ANDconst x [c]))
	// cond: uint64(c) & uint64(0xffffffffffffff80) == 0
	// result: (ANDconst <t> x [c])
	// If the AND mask already clears bit 7 and above, the value is
	// non-negative and fits in a byte, so the sign extension is redundant.
	for {
		t := v.Type
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(uint64(c)&uint64(0xffffffffffffff80) == 0) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.Type = t
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (SLLconst [lc] x))
	// cond: lc < 8
	// result: (SBFIZ [armBFAuxInt(lc, 8-lc)] x)
	// Shift-then-sign-extend becomes a single signed bitfield insert.
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc < 8) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 8-lc))
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (SBFX [bfc] x))
	// cond: bfc.getARM64BFwidth() <= 8
	// result: (SBFX [bfc] x)
	// A signed bitfield extract of at most 8 bits is already sign-extended.
	for {
		if v_0.Op != OpARM64SBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(bfc.getARM64BFwidth() <= 8) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(bfc)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBstore applies the generated rewrite rules for
// OpARM64MOVBstore (byte store). Each for block attempts one rule, described
// by its // match, // cond and // result comments; the first rule that
// applies rewrites v in place and returns true. Returns false when no rule
// applies. The six trailing rules all drop a redundant extension of the
// stored value: a byte store only writes the low 8 bits, so any MOV*reg /
// MOV*Ureg wrapper around the value is irrelevant.
func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
	// Fold a constant address addition into the store's offset.
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBstoreidx ptr idx val mem)
	// Use the register-indexed addressing mode when there is no offset/symbol.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVBstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	// Fold a symbolic address computation into the store.
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVBstorezero [off] {sym} ptr mem)
	// Storing constant zero uses the dedicated zero-store op (ZR register).
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBstoreidx applies the generated rewrite rules
// for OpARM64MOVBstoreidx (register-indexed byte store). Each for block
// attempts one rule, described by its // match, // cond and // result
// comments; the first rule that applies rewrites v in place and returns
// true. Returns false when no rule applies. The six trailing rules drop a
// redundant extension of the stored value, since a byte store only writes
// the low 8 bits.
func rewriteValueARM64_OpARM64MOVBstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBstoreidx ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVBstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstoreidx (MOVDconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVBstore [int32(c)] idx val mem)
	// Mirror of the previous rule: when the constant is in the first slot,
	// the remaining register (idx) becomes the base of the offset store.
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	// match: (MOVBstoreidx ptr idx (MOVDconst [0]) mem)
	// result: (MOVBstorezeroidx ptr idx mem)
	// Storing constant zero uses the dedicated zero-store op.
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpARM64MOVBstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVBstoreidx ptr idx (MOVBreg x) mem)
	// result: (MOVBstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVBreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVBstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVBstoreidx ptr idx (MOVBUreg x) mem)
	// result: (MOVBstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVBUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVBstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVBstoreidx ptr idx (MOVHreg x) mem)
	// result: (MOVBstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVBstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVBstoreidx ptr idx (MOVHUreg x) mem)
	// result: (MOVBstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVBstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVBstoreidx ptr idx (MOVWreg x) mem)
	// result: (MOVBstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVBstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVBstoreidx ptr idx (MOVWUreg x) mem)
	// result: (MOVBstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVBstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBstorezero applies the generated rewrite rules
// for OpARM64MOVBstorezero (store of a zero byte). Each for block attempts
// one rule, described by its // match, // cond and // result comments; the
// first rule that applies rewrites v in place and returns true. Returns
// false when no rule applies.
func rewriteValueARM64_OpARM64MOVBstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
	// Fold a constant address addition into the store's offset.
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	// Fold a symbolic address computation into the store.
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstorezero [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBstorezeroidx ptr idx mem)
	// Use the register-indexed addressing mode when there is no offset/symbol.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVBstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBstorezeroidx applies the generated rewrite
// rules for OpARM64MOVBstorezeroidx (register-indexed store of a zero byte).
// Each for block attempts one rule, described by its // match, // cond and
// // result comments; the first rule that applies rewrites v in place and
// returns true. Returns false when no rule applies.
func rewriteValueARM64_OpARM64MOVBstorezeroidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBstorezeroidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVBstorezero [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstorezeroidx (MOVDconst [c]) idx mem)
	// cond: is32Bit(c)
	// result: (MOVBstorezero [int32(c)] idx mem)
	// Mirror of the previous rule: the remaining register (idx) becomes the
	// base of the offset store.
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDload applies the generated rewrite rules for
// OpARM64MOVDload (64-bit load). Each for block attempts one rule, described
// by its // match, // cond and // result comments; the first rule that
// applies rewrites v in place and returns true. Returns false when no rule
// applies.
func rewriteValueARM64_OpARM64MOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVDload [off] {sym} ptr (FMOVDstore [off] {sym} ptr val _))
	// result: (FMOVDfpgp val)
	// Reloading a value that the incoming memory state just stored from an
	// FP register turns into a direct FP->GP register move.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64FMOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpARM64FMOVDfpgp)
		v.AddArg(val)
		return true
	}
	// match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDload [off1+int32(off2)] {sym} ptr mem)
	// Fold a constant address addition into the load's offset.
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDloadidx ptr idx mem)
	// Use the register-indexed addressing mode when there is no offset/symbol.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVDload [off] {sym} (ADDshiftLL [3] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDloadidx8 ptr idx mem)
	// A base plus index<<3 matches the scaled-index (8-byte) addressing mode.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDloadidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	// Fold a symbolic address computation into the load.
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDload [off] {sym} ptr (MOVDstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	// Loading a word the incoming memory state just zeroed yields 0.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVDstorezero {
			break
		}
		off2 := auxIntToInt32(v_1.AuxInt)
		sym2 := auxToSym(v_1.Aux)
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVDload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVDconst [int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	// A load from a read-only symbol folds to the data read at compile time.
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDloadidx applies the generated rewrite rules
// for OpARM64MOVDloadidx (register-indexed 64-bit load). Each for block
// attempts one rule, described by its // match, // cond and // result
// comments; the first rule that applies rewrites v in place and returns
// true. Returns false when no rule applies.
func rewriteValueARM64_OpARM64MOVDloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVDload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVDload [int32(c)] ptr mem)
	// Mirror of the previous rule for a constant in the first argument.
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDloadidx ptr (SLLconst [3] idx) mem)
	// result: (MOVDloadidx8 ptr idx mem)
	// An index shifted left by 3 matches the scaled-index (8-byte) mode.
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVDloadidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVDloadidx (SLLconst [3] idx) ptr mem)
	// result: (MOVDloadidx8 ptr idx mem)
	// Mirror of the previous rule for a shifted index in the first argument.
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVDloadidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVDloadidx ptr idx (MOVDstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	// A load from an address the incoming memory state just zeroed (with
	// either argument order) is the constant 0.
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDstorezeroidx {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDloadidx8 applies the generated rewrite rules
// for OpARM64MOVDloadidx8 (64-bit load with index scaled by 8). Each for
// block attempts one rule, described by its // match, // cond and // result
// comments; the first rule that applies rewrites v in place and returns
// true. Returns false when no rule applies.
func rewriteValueARM64_OpARM64MOVDloadidx8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDloadidx8 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<3)
	// result: (MOVDload [int32(c)<<3] ptr mem)
	// A constant index folds into the offset, pre-scaled by 8.
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 3)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AuxInt = int32ToAuxInt(int32(c) << 3)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDloadidx8 ptr idx (MOVDstorezeroidx8 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	// A load from an address the incoming memory state just zeroed is 0.
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDstorezeroidx8 {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDnop applies the generated rewrite rule for
// OpARM64MOVDnop: a no-op move of a constant is just the constant. Rewrites
// v in place and returns true when the rule applies; returns false otherwise.
func rewriteValueARM64_OpARM64MOVDnop(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDnop (MOVDconst [c]))
	// result: (MOVDconst [c])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDreg applies the generated rewrite rules for
// OpARM64MOVDreg. Each for block attempts one rule, described by its
// // match, // cond and // result comments; the first rule that applies
// rewrites v in place and returns true. Returns false when no rule applies.
func rewriteValueARM64_OpARM64MOVDreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDreg x)
	// cond: x.Uses == 1
	// result: (MOVDnop x)
	// With a single use, the register move can be downgraded to a no-op.
	for {
		x := v_0
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64MOVDnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVDreg (MOVDconst [c]))
	// result: (MOVDconst [c])
	// Constant-fold: moving a constant is the constant itself.
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstore applies the generated rewrite rules for
// MOVDstore: folding constant offsets and symbol addresses into the store,
// converting to indexed or zero-store forms, and moving FP-sourced values to
// FP stores. Rules are tried in order; it reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVDstore [off] {sym} ptr (FMOVDfpgp val) mem)
	// result: (FMOVDstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64FMOVDfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		// Folding is blocked off SB under dynamic linking (SB-relative
		// addresses may need relocation-friendly forms).
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVDstore [off] {sym} (ADDshiftLL [3] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstoreidx8 ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDstoreidx8)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVDstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstoreidx applies the generated rewrite rules
// for MOVDstoreidx: folding a constant index back into a plain offset store,
// recognizing a <<3 index as the scaled idx8 form (for either operand order),
// and turning a zero-valued store into MOVDstorezeroidx. Rules are tried in
// order; it reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVDstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDstoreidx ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVDstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstoreidx (MOVDconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVDstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	// match: (MOVDstoreidx ptr (SLLconst [3] idx) val mem)
	// result: (MOVDstoreidx8 ptr idx val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
			break
		}
		idx := v_1.Args[0]
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVDstoreidx8)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVDstoreidx (SLLconst [3] idx) ptr val mem)
	// result: (MOVDstoreidx8 ptr idx val mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVDstoreidx8)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVDstoreidx ptr idx (MOVDconst [0]) mem)
	// result: (MOVDstorezeroidx ptr idx mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpARM64MOVDstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstoreidx8 applies the generated rewrite rules
// for MOVDstoreidx8 (8-byte-scaled indexed store): folding a constant index
// into a plain offset store and recognizing zero stores. Rules are tried in
// order; it reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVDstoreidx8(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDstoreidx8 ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c<<3)
	// result: (MOVDstore [int32(c)<<3] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		// The scaled offset c<<3 must fit in 32 bits for the offset form.
		if !(is32Bit(c << 3)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c) << 3)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstoreidx8 ptr idx (MOVDconst [0]) mem)
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstorezero applies the generated rewrite rules
// for MOVDstorezero: pairing two adjacent 8-byte zero stores into a single
// 16-byte MOVQstorezero, folding constant offsets and symbol addresses, and
// converting to indexed zero-store forms. Rules are tried in order; it
// reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVDstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVDstorezero {s} [i] ptr x:(MOVDstorezero {s} [i+8] ptr mem))
	// cond: x.Uses == 1 && setPos(v, x.Pos) && clobber(x)
	// result: (MOVQstorezero {s} [i] ptr mem)
	for {
		i := auxIntToInt32(v.AuxInt)
		s := auxToSym(v.Aux)
		ptr := v_0
		x := v_1
		if x.Op != OpARM64MOVDstorezero || auxIntToInt32(x.AuxInt) != i+8 || auxToSym(x.Aux) != s {
			break
		}
		mem := x.Args[1]
		// x must have no other uses so it can be clobbered; setPos/clobber
		// perform side effects only once the match is otherwise committed.
		if ptr != x.Args[0] || !(x.Uses == 1 && setPos(v, x.Pos) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = int32ToAuxInt(i)
		v.Aux = symToAux(s)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDstorezero {s} [i] ptr x:(MOVDstorezero {s} [i-8] ptr mem))
	// cond: x.Uses == 1 && setPos(v, x.Pos) && clobber(x)
	// result: (MOVQstorezero {s} [i-8] ptr mem)
	for {
		i := auxIntToInt32(v.AuxInt)
		s := auxToSym(v.Aux)
		ptr := v_0
		x := v_1
		if x.Op != OpARM64MOVDstorezero || auxIntToInt32(x.AuxInt) != i-8 || auxToSym(x.Aux) != s {
			break
		}
		mem := x.Args[1]
		if ptr != x.Args[0] || !(x.Uses == 1 && setPos(v, x.Pos) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = int32ToAuxInt(i - 8)
		v.Aux = symToAux(s)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDstorezero [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstorezeroidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVDstorezero [off] {sym} (ADDshiftLL [3] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstorezeroidx applies the generated rewrite
// rules for MOVDstorezeroidx: folding a constant index (in either operand
// position) into an offset zero-store, and recognizing a <<3 index as the
// scaled idx8 form. Rules are tried in order; it reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64MOVDstorezeroidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDstorezeroidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVDstorezero [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDstorezeroidx (MOVDconst [c]) idx mem)
	// cond: is32Bit(c)
	// result: (MOVDstorezero [int32(c)] idx mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(idx, mem)
		return true
	}
	// match: (MOVDstorezeroidx ptr (SLLconst [3] idx) mem)
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVDstorezeroidx (SLLconst [3] idx) ptr mem)
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstorezeroidx8 applies the generated rewrite
// rule for MOVDstorezeroidx8: fold a constant (8-byte-scaled) index into a
// plain offset zero-store when the scaled offset fits in 32 bits. It reports
// whether v was rewritten.
func rewriteValueARM64_OpARM64MOVDstorezeroidx8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDstorezeroidx8 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<3)
	// result: (MOVDstorezero [int32(c<<3)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 3)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(int32(c << 3))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHUload applies the generated rewrite rules for
// MOVHUload (zero-extending 16-bit load): folding constant offsets and
// symbol addresses, converting to indexed forms, constant-folding loads that
// read a just-zeroed location or read-only symbol data. Rules are tried in
// order; it reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVHUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHUloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHUloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} (ADDshiftLL [1] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHUloadidx2 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHUloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVHstorezero {
			break
		}
		off2 := auxIntToInt32(v_1.AuxInt)
		sym2 := auxToSym(v_1.Aux)
		ptr2 := v_1.Args[0]
		// Loading from the same address the preceding store just zeroed
		// always yields zero.
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVHUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVDconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		// Read-only symbol data can be read at compile time.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHUloadidx applies the generated rewrite rules
// for MOVHUloadidx: folding a constant index (either operand order) into an
// offset load, recognizing a <<1 or idx+idx index as the 2-byte-scaled idx2
// form, and folding a load of a just-zeroed indexed location to zero. Rules
// are tried in order; it reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVHUloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHUloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVHUload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVHUload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUloadidx ptr (SLLconst [1] idx) mem)
	// result: (MOVHUloadidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHUloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHUloadidx ptr (ADD idx idx) mem)
	// result: (MOVHUloadidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64ADD {
			break
		}
		idx := v_1.Args[1]
		// idx+idx == idx<<1, i.e. a 2-scaled index.
		if idx != v_1.Args[0] {
			break
		}
		mem := v_2
		v.reset(OpARM64MOVHUloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHUloadidx (ADD idx idx) ptr mem)
	// result: (MOVHUloadidx2 ptr idx mem)
	for {
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		if idx != v_0.Args[0] {
			break
		}
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVHUloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHUloadidx ptr idx (MOVHstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHstorezeroidx {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		// ptr+idx is commutative, so also accept the swapped pairing.
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHUloadidx2 applies the generated rewrite rules
// for MOVHUloadidx2 (2-byte-scaled indexed load): folding a constant index
// into an offset load, and folding a load of a just-zeroed scaled-indexed
// location to zero. Rules are tried in order; it reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64MOVHUloadidx2(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHUloadidx2 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<1)
	// result: (MOVHUload [int32(c)<<1] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 1)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = int32ToAuxInt(int32(c) << 1)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUloadidx2 ptr idx (MOVHstorezeroidx2 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHstorezeroidx2 {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHUreg applies the generated rewrite rules for
// MOVHUreg (zero-extend 16->64 bits): constant folding, dropping redundant
// extensions, and combining with shifts into bitfield ops (UBFIZ/UBFX).
// Rules are tried in order; it reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHUreg (ANDconst [c] x))
	// result: (ANDconst [c&(1<<16-1)] x)
	for {
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		// Zero-extension of an AND is an AND with the mask truncated to 16 bits.
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c & (1<<16 - 1))
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint16(c))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		return true
	}
	// match: (MOVHUreg x)
	// cond: v.Type.Size() <= 2
	// result: x
	for {
		x := v_0
		// Extension is a no-op when the result is at most 2 bytes wide.
		if !(v.Type.Size() <= 2) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVHUreg (SLLconst [lc] x))
	// cond: lc >= 16
	// result: (MOVDconst [0])
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		// Shifting left by >= 16 leaves nothing in the low 16 bits.
		if !(lc >= 16) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVHUreg (SLLconst [lc] x))
	// cond: lc < 16
	// result: (UBFIZ [armBFAuxInt(lc, 16-lc)] x)
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc < 16) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 16-lc))
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (SRLconst [rc] x))
	// cond: rc < 16
	// result: (UBFX [armBFAuxInt(rc, 16)] x)
	for {
		if v_0.Op != OpARM64SRLconst {
			break
		}
		rc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(rc < 16) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 16))
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (UBFX [bfc] x))
	// cond: bfc.getARM64BFwidth() <= 16
	// result: (UBFX [bfc] x)
	for {
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		// A bitfield extract of width <= 16 is already zero-extended.
		if !(bfc.getARM64BFwidth() <= 16) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(bfc)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHload applies the generated rewrite rules for
// MOVHload (sign-extending 16-bit load): folding constant offsets and symbol
// addresses, converting to indexed forms, and folding a load of a
// just-zeroed location to zero. Rules are tried in order; it reports whether
// v was rewritten.
func rewriteValueARM64_OpARM64MOVHload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHload [off] {sym} (ADDshiftLL [1] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHloadidx2 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVHstorezero {
			break
		}
		off2 := auxIntToInt32(v_1.AuxInt)
		sym2 := auxToSym(v_1.Aux)
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHloadidx applies the generated rewrite rules
// for MOVHloadidx: folding a constant index (either operand order) into an
// offset load, recognizing a <<1 or idx+idx index as the 2-byte-scaled idx2
// form, and folding a load of a just-zeroed indexed location to zero. Rules
// are tried in order; it reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVHloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVHload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVHload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHloadidx ptr (SLLconst [1] idx) mem)
	// result: (MOVHloadidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHloadidx ptr (ADD idx idx) mem)
	// result: (MOVHloadidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64ADD {
			break
		}
		idx := v_1.Args[1]
		// idx+idx == idx<<1, i.e. a 2-scaled index.
		if idx != v_1.Args[0] {
			break
		}
		mem := v_2
		v.reset(OpARM64MOVHloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHloadidx (ADD idx idx) ptr mem)
	// result: (MOVHloadidx2 ptr idx mem)
	for {
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		if idx != v_0.Args[0] {
			break
		}
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVHloadidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHloadidx ptr idx (MOVHstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHstorezeroidx {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		// ptr+idx is commutative, so also accept the swapped pairing.
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHloadidx2 applies the generated rewrite rules
// for MOVHloadidx2 (2-byte-scaled indexed load): folding a constant index
// into an offset load, and folding a load of a just-zeroed scaled-indexed
// location to zero. Rules are tried in order; it reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64MOVHloadidx2(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHloadidx2 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<1)
	// result: (MOVHload [int32(c)<<1] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 1)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = int32ToAuxInt(int32(c) << 1)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHloadidx2 ptr idx (MOVHstorezeroidx2 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHstorezeroidx2 {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHreg applies the generated rewrite rules for
// MOVHreg (sign-extend 16->64 bits): constant folding, dropping redundant
// extensions, and combining with shifts into signed bitfield ops
// (SBFIZ/SBFX). Rules are tried in order; it reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int16(c))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int16(c)))
		return true
	}
	// match: (MOVHreg x)
	// cond: v.Type.Size() <= 2
	// result: x
	for {
		x := v_0
		// Extension is a no-op when the result is at most 2 bytes wide.
		if !(v.Type.Size() <= 2) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVHreg <t> (ANDconst x [c]))
	// cond: uint64(c) & uint64(0xffffffffffff8000) == 0
	// result: (ANDconst <t> x [c])
	for {
		t := v.Type
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		// Mask must clear the sign bit (bit 15) and everything above it,
		// so the AND result is already correctly sign-extended.
		if !(uint64(c)&uint64(0xffffffffffff8000) == 0) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.Type = t
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (SLLconst [lc] x))
	// cond: lc < 16
	// result: (SBFIZ [armBFAuxInt(lc, 16-lc)] x)
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc < 16) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 16-lc))
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (SBFX [bfc] x))
	// cond: bfc.getARM64BFwidth() <= 16
	// result: (SBFX [bfc] x)
	for {
		if v_0.Op != OpARM64SBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		// A signed bitfield extract of width <= 16 is already sign-extended.
		if !(bfc.getARM64BFwidth() <= 16) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(bfc)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstore applies generated rewrite rules to a
// MOVHstore (store halfword) value: folding address arithmetic into the
// store's offset/aux, converting to indexed store forms, recognizing
// zero stores, and dropping redundant extensions of the stored value.
// Each for-block attempts one rule and returns true on the first that
// applies; false means no rule matched. Generated from _gen/ARM64.rules;
// do not edit by hand.
func rewriteValueARM64_OpARM64MOVHstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		// Fold a constant add into the store offset, but not through SB
		// when dynamic linking (SB-relative offsets are not fixed then).
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		// Use the register-indexed addressing form.
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} (ADDshiftLL [1] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstoreidx2 ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		// ptr + idx<<1 matches the scaled-index (by 2) store form.
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		// Fold a symbolic address computation into the store itself.
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		// Storing constant zero uses the dedicated zero-store op
		// (stores ZR, freeing a register).
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		// The store only writes the low 16 bits, so a preceding
		// sign/zero extension of the value is redundant (same for the
		// three rules below).
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstoreidx applies generated rewrite rules to
// a MOVHstoreidx (register-indexed halfword store): folding a constant
// index back into an offset form, upgrading to the scaled-index form when
// the index is doubled, recognizing zero stores, and dropping redundant
// extensions of the stored value. Each for-block attempts one rule and
// returns true on the first that applies. Generated from _gen/ARM64.rules;
// do not edit by hand.
func rewriteValueARM64_OpARM64MOVHstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstoreidx ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVHstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		// Constant index becomes an immediate offset.
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstoreidx (MOVDconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVHstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		// Same as above with ptr/idx roles swapped (addition commutes).
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	// match: (MOVHstoreidx ptr (SLLconst [1] idx) val mem)
	// result: (MOVHstoreidx2 ptr idx val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		idx := v_1.Args[0]
		val := v_2
		mem := v_3
		// idx<<1 matches the scale-by-2 indexed store.
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstoreidx ptr (ADD idx idx) val mem)
	// result: (MOVHstoreidx2 ptr idx val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64ADD {
			break
		}
		idx := v_1.Args[1]
		if idx != v_1.Args[0] {
			break
		}
		val := v_2
		mem := v_3
		// idx+idx == idx<<1, same scaled form.
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstoreidx (SLLconst [1] idx) ptr val mem)
	// result: (MOVHstoreidx2 ptr idx val mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstoreidx (ADD idx idx) ptr val mem)
	// result: (MOVHstoreidx2 ptr idx val mem)
	for {
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		if idx != v_0.Args[0] {
			break
		}
		ptr := v_1
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstoreidx ptr idx (MOVDconst [0]) mem)
	// result: (MOVHstorezeroidx ptr idx mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		// Storing zero uses the dedicated zero-store op.
		v.reset(OpARM64MOVHstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHstoreidx ptr idx (MOVHreg x) mem)
	// result: (MOVHstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		// Only the low 16 bits are stored; the extension is redundant
		// (same for the three rules below).
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx ptr idx (MOVHUreg x) mem)
	// result: (MOVHstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx ptr idx (MOVWreg x) mem)
	// result: (MOVHstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx ptr idx (MOVWUreg x) mem)
	// result: (MOVHstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstoreidx2 applies generated rewrite rules to
// a MOVHstoreidx2 (halfword store with index scaled by 2): folding a
// constant index into an immediate offset, recognizing zero stores, and
// dropping redundant extensions of the stored value. Each for-block
// attempts one rule and returns true on the first that applies. Generated
// from _gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MOVHstoreidx2(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstoreidx2 ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c<<1)
	// result: (MOVHstore [int32(c)<<1] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		// The effective offset is c*2; it must fit in 32 bits.
		if !(is32Bit(c << 1)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(int32(c) << 1)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstoreidx2 ptr idx (MOVDconst [0]) mem)
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		// Storing zero uses the dedicated zero-store op.
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHstoreidx2 ptr idx (MOVHreg x) mem)
	// result: (MOVHstoreidx2 ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		// Only the low 16 bits are stored; the extension is redundant
		// (same for the three rules below).
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx2 ptr idx (MOVHUreg x) mem)
	// result: (MOVHstoreidx2 ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVHUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx2 ptr idx (MOVWreg x) mem)
	// result: (MOVHstoreidx2 ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVHstoreidx2 ptr idx (MOVWUreg x) mem)
	// result: (MOVHstoreidx2 ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVHstoreidx2)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstorezero applies generated rewrite rules to
// a MOVHstorezero (store of a zero halfword): folding address arithmetic
// into the offset/aux and converting to indexed zero-store forms. Each
// for-block attempts one rule and returns true on the first that applies.
// Generated from _gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MOVHstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		// Fold a constant add into the offset; skip SB-relative
		// addresses when dynamic linking.
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		// Fold a symbolic address computation into the store.
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstorezero [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstorezeroidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		// Use the register-indexed zero-store form.
		v.reset(OpARM64MOVHstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHstorezero [off] {sym} (ADDshiftLL [1] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		// ptr + idx<<1 matches the scale-by-2 indexed zero store.
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstorezeroidx applies generated rewrite rules
// to a MOVHstorezeroidx (register-indexed zero halfword store): folding a
// constant index back into an offset form and upgrading to the scaled
// (by 2) form when the index is doubled. Each for-block attempts one rule
// and returns true on the first that applies. Generated from
// _gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MOVHstorezeroidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstorezeroidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVHstorezero [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		// Constant index becomes an immediate offset.
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstorezeroidx (MOVDconst [c]) idx mem)
	// cond: is32Bit(c)
	// result: (MOVHstorezero [int32(c)] idx mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		// Same as above with ptr/idx roles swapped (addition commutes).
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(idx, mem)
		return true
	}
	// match: (MOVHstorezeroidx ptr (SLLconst [1] idx) mem)
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		// idx<<1 matches the scale-by-2 indexed zero store.
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHstorezeroidx ptr (ADD idx idx) mem)
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64ADD {
			break
		}
		idx := v_1.Args[1]
		if idx != v_1.Args[0] {
			break
		}
		mem := v_2
		// idx+idx == idx<<1, same scaled form.
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHstorezeroidx (SLLconst [1] idx) ptr mem)
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHstorezeroidx (ADD idx idx) ptr mem)
	// result: (MOVHstorezeroidx2 ptr idx mem)
	for {
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		if idx != v_0.Args[0] {
			break
		}
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVHstorezeroidx2)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHstorezeroidx2 applies the single generated
// rewrite rule for MOVHstorezeroidx2 (zero halfword store with index
// scaled by 2): fold a constant index into an immediate-offset zero store.
// Returns true if the rule applied. Generated from _gen/ARM64.rules; do
// not edit by hand.
func rewriteValueARM64_OpARM64MOVHstorezeroidx2(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstorezeroidx2 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<1)
	// result: (MOVHstorezero [int32(c<<1)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		// The effective offset is c*2; it must fit in 32 bits.
		if !(is32Bit(c << 1)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(int32(c << 1))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVQstorezero applies generated rewrite rules to
// a MOVQstorezero (16-byte zero store), folding constant and symbolic
// address arithmetic into the store's offset/aux. Each for-block attempts
// one rule and returns true on the first that applies. Generated from
// _gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MOVQstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVQstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVQstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		// Fold a constant add into the offset; skip SB-relative
		// addresses when dynamic linking.
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVQstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVQstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		// Fold a symbolic address computation into the store.
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWUload applies generated rewrite rules to a
// MOVWUload (zero-extending 32-bit load): forwarding a just-stored FP
// value, folding address arithmetic into the offset/aux, converting to
// indexed load forms, load-after-zero-store elimination, and constant
// folding of loads from read-only symbols. Each for-block attempts one
// rule and returns true on the first that applies. Generated from
// _gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MOVWUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWUload [off] {sym} ptr (FMOVSstore [off] {sym} ptr val _))
	// result: (FMOVSfpgp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64FMOVSstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		// Reloading a value just stored from an FP register at the same
		// address: move it FP->GP directly instead of via memory.
		v.reset(OpARM64FMOVSfpgp)
		v.AddArg(val)
		return true
	}
	// match: (MOVWUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		// Fold a constant add into the load offset; skip SB-relative
		// addresses when dynamic linking.
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWUloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		// Use the register-indexed addressing form.
		v.reset(OpARM64MOVWUloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWUload [off] {sym} (ADDshiftLL [2] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWUloadidx4 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		// ptr + idx<<2 matches the scale-by-4 indexed load.
		v.reset(OpARM64MOVWUloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		// Fold a symbolic address computation into the load.
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWstorezero {
			break
		}
		off2 := auxIntToInt32(v_1.AuxInt)
		sym2 := auxToSym(v_1.Aux)
		ptr2 := v_1.Args[0]
		// Loading from an address that was just zeroed yields zero.
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVWUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVDconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		// Loads from read-only data can be folded to a constant at
		// compile time.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWUloadidx applies generated rewrite rules to
// a MOVWUloadidx (register-indexed zero-extending 32-bit load): folding a
// constant index back into an offset form, upgrading to the scale-by-4
// indexed form, and load-after-zero-store elimination. Each for-block
// attempts one rule and returns true on the first that applies. Generated
// from _gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MOVWUloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWUloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVWUload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		// Constant index becomes an immediate offset.
		v.reset(OpARM64MOVWUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVWUload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		// Same as above with the constant in the first slot
		// (addition commutes).
		v.reset(OpARM64MOVWUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUloadidx ptr (SLLconst [2] idx) mem)
	// result: (MOVWUloadidx4 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		// idx<<2 matches the scale-by-4 indexed load.
		v.reset(OpARM64MOVWUloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWUloadidx (SLLconst [2] idx) ptr mem)
	// result: (MOVWUloadidx4 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVWUloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWUloadidx ptr idx (MOVWstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWstorezeroidx {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		// Loading from an address just zeroed (either operand order)
		// yields zero.
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWUloadidx4 applies generated rewrite rules to
// a MOVWUloadidx4 (zero-extending 32-bit load with index scaled by 4):
// folding a constant index into an immediate offset and
// load-after-zero-store elimination. Each for-block attempts one rule and
// returns true on the first that applies. Generated from _gen/ARM64.rules;
// do not edit by hand.
func rewriteValueARM64_OpARM64MOVWUloadidx4(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWUloadidx4 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<2)
	// result: (MOVWUload [int32(c)<<2] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		// The effective offset is c*4; it must fit in 32 bits.
		if !(is32Bit(c << 2)) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AuxInt = int32ToAuxInt(int32(c) << 2)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUloadidx4 ptr idx (MOVWstorezeroidx4 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWstorezeroidx4 {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		// Loading from an address just zeroed yields zero.
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWUreg applies generated rewrite rules to a
// MOVWUreg (zero-extend 32-bit word) value: constant folding, dropping
// the extension when it is provably redundant, and fusing with shifts
// into unsigned bitfield operations (UBFIZ/UBFX). Each for-block attempts
// one rule and returns true on the first that applies. Generated from
// _gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWUreg (ANDconst [c] x))
	// result: (ANDconst [c&(1<<32-1)] x)
	for {
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		// Fold the zero-extension into the AND mask.
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c & (1<<32 - 1))
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint32(c))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		// Constant-fold: keep only the low 32 bits, zero-extended.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
		return true
	}
	// match: (MOVWUreg x)
	// cond: v.Type.Size() <= 4
	// result: x
	for {
		x := v_0
		if !(v.Type.Size() <= 4) {
			break
		}
		// The extension is a no-op for results of at most 4 bytes.
		v.copyOf(x)
		return true
	}
	// match: (MOVWUreg (SLLconst [lc] x))
	// cond: lc >= 32
	// result: (MOVDconst [0])
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		if !(lc >= 32) {
			break
		}
		// Shifting left by >= 32 leaves no bits in the low word.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVWUreg (SLLconst [lc] x))
	// cond: lc < 32
	// result: (UBFIZ [armBFAuxInt(lc, 32-lc)] x)
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc < 32) {
			break
		}
		// Fuse shift-left + zero-extend into one unsigned
		// bitfield-insert-in-zero instruction.
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 32-lc))
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (SRLconst [rc] x))
	// cond: rc < 32
	// result: (UBFX [armBFAuxInt(rc, 32)] x)
	for {
		if v_0.Op != OpARM64SRLconst {
			break
		}
		rc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(rc < 32) {
			break
		}
		// Fuse shift-right + zero-extend into one unsigned
		// bitfield-extract instruction.
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 32))
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (UBFX [bfc] x))
	// cond: bfc.getARM64BFwidth() <= 32
	// result: (UBFX [bfc] x)
	for {
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		// An unsigned bitfield extract of width <= 32 already yields a
		// zero-extended 32-bit value; drop the redundant MOVWUreg.
		if !(bfc.getARM64BFwidth() <= 32) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(bfc)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWload applies the generated rewrite rules for
// the MOVWload op (32-bit zero-extending load). Each for-loop below attempts
// one rule; the match/cond/result comment above it describes the pattern, a
// break means that rule did not apply. The first rule that fires rewrites v
// in place and returns true; if no rule fires, v is unchanged and the
// function returns false.
func rewriteValueARM64_OpARM64MOVWload(v *Value) bool {
	// Cache the argument Values once for the pattern matches below.
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVWloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWload [off] {sym} (ADDshiftLL [2] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWloadidx4 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVWloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWstorezero {
			break
		}
		off2 := auxIntToInt32(v_1.AuxInt)
		sym2 := auxToSym(v_1.Aux)
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWloadidx applies the generated rewrite rules
// for the MOVWloadidx op (register-indexed 32-bit load). Rules are tried in
// order; the first match rewrites v in place and returns true, otherwise the
// function returns false with v unchanged.
func rewriteValueARM64_OpARM64MOVWloadidx(v *Value) bool {
	// Cache the argument Values once for the pattern matches below.
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWloadidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVWload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWloadidx (MOVDconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVWload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWloadidx ptr (SLLconst [2] idx) mem)
	// result: (MOVWloadidx4 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVWloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWloadidx (SLLconst [2] idx) ptr mem)
	// result: (MOVWloadidx4 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVWloadidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWloadidx ptr idx (MOVWstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWstorezeroidx {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWloadidx4 applies the generated rewrite rules
// for the MOVWloadidx4 op (32-bit load with index scaled by 4). Rules are
// tried in order; the first match rewrites v in place and returns true,
// otherwise the function returns false with v unchanged.
func rewriteValueARM64_OpARM64MOVWloadidx4(v *Value) bool {
	// Cache the argument Values once for the pattern matches below.
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWloadidx4 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<2)
	// result: (MOVWload [int32(c)<<2] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 2)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = int32ToAuxInt(int32(c) << 2)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWloadidx4 ptr idx (MOVWstorezeroidx4 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWstorezeroidx4 {
			break
		}
		idx2 := v_2.Args[1]
		ptr2 := v_2.Args[0]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWreg applies the generated rewrite rules for
// the MOVWreg op (32-bit sign extension): constant folding, eliding the
// extension when the result type is already <= 4 bytes, and folding into
// ANDconst/SBFIZ/SBFX forms. Rules are tried in order; the first match
// rewrites v in place and returns true, otherwise false is returned.
func rewriteValueARM64_OpARM64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int32(c))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c)))
		return true
	}
	// match: (MOVWreg x)
	// cond: v.Type.Size() <= 4
	// result: x
	for {
		x := v_0
		if !(v.Type.Size() <= 4) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWreg <t> (ANDconst x [c]))
	// cond: uint64(c) & uint64(0xffffffff80000000) == 0
	// result: (ANDconst <t> x [c])
	for {
		t := v.Type
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(uint64(c)&uint64(0xffffffff80000000) == 0) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.Type = t
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg (SLLconst [lc] x))
	// cond: lc < 32
	// result: (SBFIZ [armBFAuxInt(lc, 32-lc)] x)
	for {
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc < 32) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 32-lc))
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg (SBFX [bfc] x))
	// cond: bfc.getARM64BFwidth() <= 32
	// result: (SBFX [bfc] x)
	for {
		if v_0.Op != OpARM64SBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(bfc.getARM64BFwidth() <= 32) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(bfc)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWstore applies the generated rewrite rules for
// the MOVWstore op (32-bit store): routing FP-to-GP moves to FMOVSstore,
// folding address arithmetic into the offset or indexed forms, merging
// MOVDaddr symbols, turning stores of constant zero into MOVWstorezero, and
// dropping redundant MOVWreg/MOVWUreg extensions of the stored value. Rules
// are tried in order; the first match rewrites v in place and returns true,
// otherwise false is returned.
func rewriteValueARM64_OpARM64MOVWstore(v *Value) bool {
	// Cache the argument Values once for the pattern matches below.
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWstore [off] {sym} ptr (FMOVSfpgp val) mem)
	// result: (FMOVSstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64FMOVSfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVWstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} (ADDshiftLL [2] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWstoreidx4 ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVWstoreidx4)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWstoreidx applies the generated rewrite rules
// for the MOVWstoreidx op (register-indexed 32-bit store): folding constant
// indices back into a plain MOVWstore offset, converting SLLconst[2] indices
// to the scaled MOVWstoreidx4 form, turning zero stores into
// MOVWstorezeroidx, and dropping redundant MOVWreg/MOVWUreg extensions of
// the stored value. Rules are tried in order; the first match rewrites v in
// place and returns true, otherwise false is returned.
func rewriteValueARM64_OpARM64MOVWstoreidx(v *Value) bool {
	// Cache the argument Values once for the pattern matches below.
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstoreidx ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVWstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstoreidx (MOVDconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVWstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	// match: (MOVWstoreidx ptr (SLLconst [2] idx) val mem)
	// result: (MOVWstoreidx4 ptr idx val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
			break
		}
		idx := v_1.Args[0]
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVWstoreidx4)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVWstoreidx (SLLconst [2] idx) ptr val mem)
	// result: (MOVWstoreidx4 ptr idx val mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		val := v_2
		mem := v_3
		v.reset(OpARM64MOVWstoreidx4)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVWstoreidx ptr idx (MOVDconst [0]) mem)
	// result: (MOVWstorezeroidx ptr idx mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpARM64MOVWstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWstoreidx ptr idx (MOVWreg x) mem)
	// result: (MOVWstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVWstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVWstoreidx ptr idx (MOVWUreg x) mem)
	// result: (MOVWstoreidx ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVWstoreidx)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWstoreidx4 applies the generated rewrite rules
// for the MOVWstoreidx4 op (32-bit store with index scaled by 4): folding a
// constant index (times 4) into a plain MOVWstore offset, turning zero
// stores into MOVWstorezeroidx4, and dropping redundant MOVWreg/MOVWUreg
// extensions of the stored value. Rules are tried in order; the first match
// rewrites v in place and returns true, otherwise false is returned.
func rewriteValueARM64_OpARM64MOVWstoreidx4(v *Value) bool {
	// Cache the argument Values once for the pattern matches below.
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstoreidx4 ptr (MOVDconst [c]) val mem)
	// cond: is32Bit(c<<2)
	// result: (MOVWstore [int32(c)<<2] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c << 2)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(int32(c) << 2)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstoreidx4 ptr idx (MOVDconst [0]) mem)
	// result: (MOVWstorezeroidx4 ptr idx mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpARM64MOVWstorezeroidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWstoreidx4 ptr idx (MOVWreg x) mem)
	// result: (MOVWstoreidx4 ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVWstoreidx4)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	// match: (MOVWstoreidx4 ptr idx (MOVWUreg x) mem)
	// result: (MOVWstoreidx4 ptr idx x mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpARM64MOVWUreg {
			break
		}
		x := v_2.Args[0]
		mem := v_3
		v.reset(OpARM64MOVWstoreidx4)
		v.AddArg4(ptr, idx, x, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWstorezero applies the generated rewrite rules
// for the MOVWstorezero op (store of constant zero, 32 bits): folding
// ADDconst offsets, merging MOVDaddr symbols, and converting to the indexed
// MOVWstorezeroidx/MOVWstorezeroidx4 forms. Rules are tried in order; the
// first match rewrites v in place and returns true, otherwise false is
// returned.
func rewriteValueARM64_OpARM64MOVWstorezero(v *Value) bool {
	// Cache the argument Values once for the pattern matches below.
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstorezero [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWstorezeroidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVWstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWstorezero [off] {sym} (ADDshiftLL [2] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWstorezeroidx4 ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVWstorezeroidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWstorezeroidx applies the generated rewrite
// rules for the MOVWstorezeroidx op (register-indexed 32-bit zero store):
// folding constant indices back into a plain MOVWstorezero offset and
// converting SLLconst[2] indices to the scaled MOVWstorezeroidx4 form.
// Rules are tried in order; the first match rewrites v in place and returns
// true, otherwise false is returned.
func rewriteValueARM64_OpARM64MOVWstorezeroidx(v *Value) bool {
	// Cache the argument Values once for the pattern matches below.
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstorezeroidx ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVWstorezero [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstorezeroidx (MOVDconst [c]) idx mem)
	// cond: is32Bit(c)
	// result: (MOVWstorezero [int32(c)] idx mem)
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(idx, mem)
		return true
	}
	// match: (MOVWstorezeroidx ptr (SLLconst [2] idx) mem)
	// result: (MOVWstorezeroidx4 ptr idx mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
			break
		}
		idx := v_1.Args[0]
		mem := v_2
		v.reset(OpARM64MOVWstorezeroidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWstorezeroidx (SLLconst [2] idx) ptr mem)
	// result: (MOVWstorezeroidx4 ptr idx mem)
	for {
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
			break
		}
		idx := v_0.Args[0]
		ptr := v_1
		mem := v_2
		v.reset(OpARM64MOVWstorezeroidx4)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVWstorezeroidx4 applies the generated rewrite
// rule for the MOVWstorezeroidx4 op (32-bit zero store with index scaled by
// 4): a constant index is folded (times 4) into a plain MOVWstorezero
// offset. Returns true if the rewrite fired, false otherwise.
func rewriteValueARM64_OpARM64MOVWstorezeroidx4(v *Value) bool {
	// Cache the argument Values once for the pattern match below.
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstorezeroidx4 ptr (MOVDconst [c]) mem)
	// cond: is32Bit(c<<2)
	// result: (MOVWstorezero [int32(c<<2)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c << 2)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(int32(c << 2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
12510func rewriteValueARM64_OpARM64MSUB(v *Value) bool {
12511	v_2 := v.Args[2]
12512	v_1 := v.Args[1]
12513	v_0 := v.Args[0]
12514	b := v.Block
12515	// match: (MSUB a x (MOVDconst [-1]))
12516	// result: (ADD a x)
12517	for {
12518		a := v_0
12519		x := v_1
12520		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != -1 {
12521			break
12522		}
12523		v.reset(OpARM64ADD)
12524		v.AddArg2(a, x)
12525		return true
12526	}
12527	// match: (MSUB a _ (MOVDconst [0]))
12528	// result: a
12529	for {
12530		a := v_0
12531		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
12532			break
12533		}
12534		v.copyOf(a)
12535		return true
12536	}
12537	// match: (MSUB a x (MOVDconst [1]))
12538	// result: (SUB a x)
12539	for {
12540		a := v_0
12541		x := v_1
12542		if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 1 {
12543			break
12544		}
12545		v.reset(OpARM64SUB)
12546		v.AddArg2(a, x)
12547		return true
12548	}
12549	// match: (MSUB a x (MOVDconst [c]))
12550	// cond: isPowerOfTwo64(c)
12551	// result: (SUBshiftLL a x [log64(c)])
12552	for {
12553		a := v_0
12554		x := v_1
12555		if v_2.Op != OpARM64MOVDconst {
12556			break
12557		}
12558		c := auxIntToInt64(v_2.AuxInt)
12559		if !(isPowerOfTwo64(c)) {
12560			break
12561		}
12562		v.reset(OpARM64SUBshiftLL)
12563		v.AuxInt = int64ToAuxInt(log64(c))
12564		v.AddArg2(a, x)
12565		return true
12566	}
12567	// match: (MSUB a x (MOVDconst [c]))
12568	// cond: isPowerOfTwo64(c-1) && c>=3
12569	// result: (SUB a (ADDshiftLL <x.Type> x x [log64(c-1)]))
12570	for {
12571		a := v_0
12572		x := v_1
12573		if v_2.Op != OpARM64MOVDconst {
12574			break
12575		}
12576		c := auxIntToInt64(v_2.AuxInt)
12577		if !(isPowerOfTwo64(c-1) && c >= 3) {
12578			break
12579		}
12580		v.reset(OpARM64SUB)
12581		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
12582		v0.AuxInt = int64ToAuxInt(log64(c - 1))
12583		v0.AddArg2(x, x)
12584		v.AddArg2(a, v0)
12585		return true
12586	}
12587	// match: (MSUB a x (MOVDconst [c]))
12588	// cond: isPowerOfTwo64(c+1) && c>=7
12589	// result: (ADD a (SUBshiftLL <x.Type> x x [log64(c+1)]))
12590	for {
12591		a := v_0
12592		x := v_1
12593		if v_2.Op != OpARM64MOVDconst {
12594			break
12595		}
12596		c := auxIntToInt64(v_2.AuxInt)
12597		if !(isPowerOfTwo64(c+1) && c >= 7) {
12598			break
12599		}
12600		v.reset(OpARM64ADD)
12601		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
12602		v0.AuxInt = int64ToAuxInt(log64(c + 1))
12603		v0.AddArg2(x, x)
12604		v.AddArg2(a, v0)
12605		return true
12606	}
12607	// match: (MSUB a x (MOVDconst [c]))
12608	// cond: c%3 == 0 && isPowerOfTwo64(c/3)
12609	// result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)])
12610	for {
12611		a := v_0
12612		x := v_1
12613		if v_2.Op != OpARM64MOVDconst {
12614			break
12615		}
12616		c := auxIntToInt64(v_2.AuxInt)
12617		if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
12618			break
12619		}
12620		v.reset(OpARM64ADDshiftLL)
12621		v.AuxInt = int64ToAuxInt(log64(c / 3))
12622		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
12623		v0.AuxInt = int64ToAuxInt(2)
12624		v0.AddArg2(x, x)
12625		v.AddArg2(a, v0)
12626		return true
12627	}
12628	// match: (MSUB a x (MOVDconst [c]))
12629	// cond: c%5 == 0 && isPowerOfTwo64(c/5)
12630	// result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)])
12631	for {
12632		a := v_0
12633		x := v_1
12634		if v_2.Op != OpARM64MOVDconst {
12635			break
12636		}
12637		c := auxIntToInt64(v_2.AuxInt)
12638		if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
12639			break
12640		}
12641		v.reset(OpARM64SUBshiftLL)
12642		v.AuxInt = int64ToAuxInt(log64(c / 5))
12643		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
12644		v0.AuxInt = int64ToAuxInt(2)
12645		v0.AddArg2(x, x)
12646		v.AddArg2(a, v0)
12647		return true
12648	}
12649	// match: (MSUB a x (MOVDconst [c]))
12650	// cond: c%7 == 0 && isPowerOfTwo64(c/7)
12651	// result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)])
12652	for {
12653		a := v_0
12654		x := v_1
12655		if v_2.Op != OpARM64MOVDconst {
12656			break
12657		}
12658		c := auxIntToInt64(v_2.AuxInt)
12659		if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
12660			break
12661		}
12662		v.reset(OpARM64ADDshiftLL)
12663		v.AuxInt = int64ToAuxInt(log64(c / 7))
12664		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
12665		v0.AuxInt = int64ToAuxInt(3)
12666		v0.AddArg2(x, x)
12667		v.AddArg2(a, v0)
12668		return true
12669	}
12670	// match: (MSUB a x (MOVDconst [c]))
12671	// cond: c%9 == 0 && isPowerOfTwo64(c/9)
12672	// result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)])
12673	for {
12674		a := v_0
12675		x := v_1
12676		if v_2.Op != OpARM64MOVDconst {
12677			break
12678		}
12679		c := auxIntToInt64(v_2.AuxInt)
12680		if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
12681			break
12682		}
12683		v.reset(OpARM64SUBshiftLL)
12684		v.AuxInt = int64ToAuxInt(log64(c / 9))
12685		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
12686		v0.AuxInt = int64ToAuxInt(3)
12687		v0.AddArg2(x, x)
12688		v.AddArg2(a, v0)
12689		return true
12690	}
12691	// match: (MSUB a (MOVDconst [-1]) x)
12692	// result: (ADD a x)
12693	for {
12694		a := v_0
12695		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
12696			break
12697		}
12698		x := v_2
12699		v.reset(OpARM64ADD)
12700		v.AddArg2(a, x)
12701		return true
12702	}
12703	// match: (MSUB a (MOVDconst [0]) _)
12704	// result: a
12705	for {
12706		a := v_0
12707		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
12708			break
12709		}
12710		v.copyOf(a)
12711		return true
12712	}
12713	// match: (MSUB a (MOVDconst [1]) x)
12714	// result: (SUB a x)
12715	for {
12716		a := v_0
12717		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
12718			break
12719		}
12720		x := v_2
12721		v.reset(OpARM64SUB)
12722		v.AddArg2(a, x)
12723		return true
12724	}
12725	// match: (MSUB a (MOVDconst [c]) x)
12726	// cond: isPowerOfTwo64(c)
12727	// result: (SUBshiftLL a x [log64(c)])
12728	for {
12729		a := v_0
12730		if v_1.Op != OpARM64MOVDconst {
12731			break
12732		}
12733		c := auxIntToInt64(v_1.AuxInt)
12734		x := v_2
12735		if !(isPowerOfTwo64(c)) {
12736			break
12737		}
12738		v.reset(OpARM64SUBshiftLL)
12739		v.AuxInt = int64ToAuxInt(log64(c))
12740		v.AddArg2(a, x)
12741		return true
12742	}
12743	// match: (MSUB a (MOVDconst [c]) x)
12744	// cond: isPowerOfTwo64(c-1) && c>=3
12745	// result: (SUB a (ADDshiftLL <x.Type> x x [log64(c-1)]))
12746	for {
12747		a := v_0
12748		if v_1.Op != OpARM64MOVDconst {
12749			break
12750		}
12751		c := auxIntToInt64(v_1.AuxInt)
12752		x := v_2
12753		if !(isPowerOfTwo64(c-1) && c >= 3) {
12754			break
12755		}
12756		v.reset(OpARM64SUB)
12757		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
12758		v0.AuxInt = int64ToAuxInt(log64(c - 1))
12759		v0.AddArg2(x, x)
12760		v.AddArg2(a, v0)
12761		return true
12762	}
12763	// match: (MSUB a (MOVDconst [c]) x)
12764	// cond: isPowerOfTwo64(c+1) && c>=7
12765	// result: (ADD a (SUBshiftLL <x.Type> x x [log64(c+1)]))
12766	for {
12767		a := v_0
12768		if v_1.Op != OpARM64MOVDconst {
12769			break
12770		}
12771		c := auxIntToInt64(v_1.AuxInt)
12772		x := v_2
12773		if !(isPowerOfTwo64(c+1) && c >= 7) {
12774			break
12775		}
12776		v.reset(OpARM64ADD)
12777		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
12778		v0.AuxInt = int64ToAuxInt(log64(c + 1))
12779		v0.AddArg2(x, x)
12780		v.AddArg2(a, v0)
12781		return true
12782	}
12783	// match: (MSUB a (MOVDconst [c]) x)
12784	// cond: c%3 == 0 && isPowerOfTwo64(c/3)
12785	// result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)])
12786	for {
12787		a := v_0
12788		if v_1.Op != OpARM64MOVDconst {
12789			break
12790		}
12791		c := auxIntToInt64(v_1.AuxInt)
12792		x := v_2
12793		if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
12794			break
12795		}
12796		v.reset(OpARM64ADDshiftLL)
12797		v.AuxInt = int64ToAuxInt(log64(c / 3))
12798		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
12799		v0.AuxInt = int64ToAuxInt(2)
12800		v0.AddArg2(x, x)
12801		v.AddArg2(a, v0)
12802		return true
12803	}
12804	// match: (MSUB a (MOVDconst [c]) x)
12805	// cond: c%5 == 0 && isPowerOfTwo64(c/5)
12806	// result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)])
12807	for {
12808		a := v_0
12809		if v_1.Op != OpARM64MOVDconst {
12810			break
12811		}
12812		c := auxIntToInt64(v_1.AuxInt)
12813		x := v_2
12814		if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
12815			break
12816		}
12817		v.reset(OpARM64SUBshiftLL)
12818		v.AuxInt = int64ToAuxInt(log64(c / 5))
12819		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
12820		v0.AuxInt = int64ToAuxInt(2)
12821		v0.AddArg2(x, x)
12822		v.AddArg2(a, v0)
12823		return true
12824	}
12825	// match: (MSUB a (MOVDconst [c]) x)
12826	// cond: c%7 == 0 && isPowerOfTwo64(c/7)
12827	// result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)])
12828	for {
12829		a := v_0
12830		if v_1.Op != OpARM64MOVDconst {
12831			break
12832		}
12833		c := auxIntToInt64(v_1.AuxInt)
12834		x := v_2
12835		if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
12836			break
12837		}
12838		v.reset(OpARM64ADDshiftLL)
12839		v.AuxInt = int64ToAuxInt(log64(c / 7))
12840		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
12841		v0.AuxInt = int64ToAuxInt(3)
12842		v0.AddArg2(x, x)
12843		v.AddArg2(a, v0)
12844		return true
12845	}
12846	// match: (MSUB a (MOVDconst [c]) x)
12847	// cond: c%9 == 0 && isPowerOfTwo64(c/9)
12848	// result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)])
12849	for {
12850		a := v_0
12851		if v_1.Op != OpARM64MOVDconst {
12852			break
12853		}
12854		c := auxIntToInt64(v_1.AuxInt)
12855		x := v_2
12856		if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
12857			break
12858		}
12859		v.reset(OpARM64SUBshiftLL)
12860		v.AuxInt = int64ToAuxInt(log64(c / 9))
12861		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
12862		v0.AuxInt = int64ToAuxInt(3)
12863		v0.AddArg2(x, x)
12864		v.AddArg2(a, v0)
12865		return true
12866	}
12867	// match: (MSUB (MOVDconst [c]) x y)
12868	// result: (ADDconst [c] (MNEG <x.Type> x y))
12869	for {
12870		if v_0.Op != OpARM64MOVDconst {
12871			break
12872		}
12873		c := auxIntToInt64(v_0.AuxInt)
12874		x := v_1
12875		y := v_2
12876		v.reset(OpARM64ADDconst)
12877		v.AuxInt = int64ToAuxInt(c)
12878		v0 := b.NewValue0(v.Pos, OpARM64MNEG, x.Type)
12879		v0.AddArg2(x, y)
12880		v.AddArg(v0)
12881		return true
12882	}
12883	// match: (MSUB a (MOVDconst [c]) (MOVDconst [d]))
12884	// result: (SUBconst [c*d] a)
12885	for {
12886		a := v_0
12887		if v_1.Op != OpARM64MOVDconst {
12888			break
12889		}
12890		c := auxIntToInt64(v_1.AuxInt)
12891		if v_2.Op != OpARM64MOVDconst {
12892			break
12893		}
12894		d := auxIntToInt64(v_2.AuxInt)
12895		v.reset(OpARM64SUBconst)
12896		v.AuxInt = int64ToAuxInt(c * d)
12897		v.AddArg(a)
12898		return true
12899	}
12900	return false
12901}
// rewriteValueARM64_OpARM64MSUBW applies the generated rewrite rules for the
// ARM64 MSUBW op (32-bit multiply-subtract: a - x*y). Each `for { ... }`
// block below is one rule; rules are tried in order and the first match
// wins. Constant multipliers (tested via int32(c), since only the low 32
// bits matter) are strength-reduced into shift/add/sub sequences, and every
// result is wrapped in MOVWUreg — per the ARM64 op naming, a word
// zero-extension of the 32-bit result.
func rewriteValueARM64_OpARM64MSUBW(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: (MOVWUreg (ADD <a.Type> a x))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, a.Type)
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: (MOVWUreg a)
	for {
		a := v_0
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v.AddArg(a)
		return true
	}
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (MOVWUreg (SUB <a.Type> a x))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUB, a.Type)
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (MOVWUreg (SUBshiftLL <a.Type> a x [log64(c)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c))
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && int32(c)>=3
	// result: (MOVWUreg (SUB <a.Type> a (ADDshiftLL <x.Type> x x [log64(c-1)])))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUB, a.Type)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(log64(c - 1))
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && int32(c)>=7
	// result: (MOVWUreg (ADD <a.Type> a (SUBshiftLL <x.Type> x x [log64(c+1)])))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, a.Type)
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(log64(c + 1))
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)
	// result: (MOVWUreg (ADDshiftLL <a.Type> a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 3))
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(2)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)
	// result: (MOVWUreg (SUBshiftLL <a.Type> a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 5))
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(2)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)
	// result: (MOVWUreg (ADDshiftLL <a.Type> a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 7))
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(3)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)
	// result: (MOVWUreg (SUBshiftLL <a.Type> a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)]))
	for {
		a := v_0
		x := v_1
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_2.AuxInt)
		if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 9))
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(3)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// The following rules repeat the constant-multiplier reductions above
	// with the constant in the second (rather than third) argument slot.
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: int32(c)==-1
	// result: (MOVWUreg (ADD <a.Type> a x))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, a.Type)
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) _)
	// cond: int32(c)==0
	// result: (MOVWUreg a)
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v.AddArg(a)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: int32(c)==1
	// result: (MOVWUreg (SUB <a.Type> a x))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUB, a.Type)
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c)
	// result: (MOVWUreg (SUBshiftLL <a.Type> a x [log64(c)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c))
		v0.AddArg2(a, x)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c-1) && int32(c)>=3
	// result: (MOVWUreg (SUB <a.Type> a (ADDshiftLL <x.Type> x x [log64(c-1)])))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUB, a.Type)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(log64(c - 1))
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo64(c+1) && int32(c)>=7
	// result: (MOVWUreg (ADD <a.Type> a (SUBshiftLL <x.Type> x x [log64(c+1)])))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, a.Type)
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(log64(c + 1))
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)
	// result: (MOVWUreg (ADDshiftLL <a.Type> a (SUBshiftLL <x.Type> x x [2]) [log64(c/3)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 3))
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(2)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)
	// result: (MOVWUreg (SUBshiftLL <a.Type> a (ADDshiftLL <x.Type> x x [2]) [log64(c/5)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 5))
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(2)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)
	// result: (MOVWUreg (ADDshiftLL <a.Type> a (SUBshiftLL <x.Type> x x [3]) [log64(c/7)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 7))
		v1 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(3)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)
	// result: (MOVWUreg (SUBshiftLL <a.Type> a (ADDshiftLL <x.Type> x x [3]) [log64(c/9)]))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		x := v_2
		if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, a.Type)
		v0.AuxInt = int64ToAuxInt(log64(c / 9))
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = int64ToAuxInt(3)
		v1.AddArg2(x, x)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW (MOVDconst [c]) x y)
	// result: (MOVWUreg (ADDconst <x.Type> [c] (MNEGW <x.Type> x y)))
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		y := v_2
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, x.Type)
		v0.AuxInt = int64ToAuxInt(c)
		v1 := b.NewValue0(v.Pos, OpARM64MNEGW, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) (MOVDconst [d]))
	// result: (MOVWUreg (SUBconst <a.Type> [c*d] a))
	for {
		a := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_2.AuxInt)
		v.reset(OpARM64MOVWUreg)
		v0 := b.NewValue0(v.Pos, OpARM64SUBconst, a.Type)
		v0.AuxInt = int64ToAuxInt(c * d)
		v0.AddArg(a)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MUL applies the generated rewrite rules for the
// ARM64 MUL op (64-bit multiply). Rules are tried in order; the first match
// wins. The inner `for _i0 := 0; _i0 <= 1; ...` loops try both operand
// orders, swapping v_0 and v_1 on the second pass, since MUL is commutative.
// Constant multiplicands are folded outright or strength-reduced into
// NEG/SLLconst/ADDshiftLL sequences.
func rewriteValueARM64_OpARM64MUL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (MUL (NEG x) y)
	// result: (MNEG x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64NEG {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			v.reset(OpARM64MNEG)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [-1]))
	// result: (NEG x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
				continue
			}
			v.reset(OpARM64NEG)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (MUL _ (MOVDconst [0]))
	// result: (MOVDconst [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
				continue
			}
			v.reset(OpARM64MOVDconst)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [1]))
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (SLLconst [log64(c)] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.AuxInt = int64ToAuxInt(log64(c))
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && c >= 3
	// result: (ADDshiftLL x x [log64(c-1)])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c-1) && c >= 3) {
				continue
			}
			v.reset(OpARM64ADDshiftLL)
			v.AuxInt = int64ToAuxInt(log64(c - 1))
			v.AddArg2(x, x)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && c >= 7
	// result: (ADDshiftLL (NEG <x.Type> x) x [log64(c+1)])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c+1) && c >= 7) {
				continue
			}
			v.reset(OpARM64ADDshiftLL)
			v.AuxInt = int64ToAuxInt(log64(c + 1))
			v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v0.AddArg(x)
			v.AddArg2(v0, x)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3)
	// result: (SLLconst [log64(c/3)] (ADDshiftLL <x.Type> x x [1]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.AuxInt = int64ToAuxInt(log64(c / 3))
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(1)
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5)
	// result: (SLLconst [log64(c/5)] (ADDshiftLL <x.Type> x x [2]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.AuxInt = int64ToAuxInt(log64(c / 5))
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(2)
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7)
	// result: (SLLconst [log64(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.AuxInt = int64ToAuxInt(log64(c / 7))
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(3)
			v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v1.AddArg(x)
			v0.AddArg2(v1, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9)
	// result: (SLLconst [log64(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
				continue
			}
			v.reset(OpARM64SLLconst)
			v.AuxInt = int64ToAuxInt(log64(c / 9))
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(3)
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MUL (MOVDconst [c]) (MOVDconst [d]))
	// result: (MOVDconst [c*d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64MOVDconst)
			v.AuxInt = int64ToAuxInt(c * d)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64MULW applies the generated rewrite rules for the
// ARM64 MULW op (32-bit multiply; conditions test int32(c) since only the
// low 32 bits of a constant operand matter). Rules are tried in order; the
// inner `for _i0 := 0; _i0 <= 1; ...` loops try both operand orders, since
// MULW is commutative. Constant multiplicands are folded or strength-reduced
// into NEG/SLLconst/ADDshiftLL sequences wrapped in MOVWUreg — per the ARM64
// op naming, a word zero-extension of the 32-bit result.
func rewriteValueARM64_OpARM64MULW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (MULW (NEG x) y)
	// result: (MNEGW x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64NEG {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			v.reset(OpARM64MNEGW)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: (MOVWUreg (NEG <x.Type> x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(int32(c) == -1) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v0.AddArg(x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MULW _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(int32(c) == 0) {
				continue
			}
			v.reset(OpARM64MOVDconst)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (MOVWUreg x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(int32(c) == 1) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (MOVWUreg (SLLconst <x.Type> [log64(c)] x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c)) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c))
			v0.AddArg(x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c-1) && int32(c) >= 3
	// result: (MOVWUreg (ADDshiftLL <x.Type> x x [log64(c-1)]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c - 1))
			v0.AddArg2(x, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c+1) && int32(c) >= 7
	// result: (MOVWUreg (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log64(c+1)]))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c + 1))
			v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v1.AddArg(x)
			v0.AddArg2(v1, x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)
	// result: (MOVWUreg (SLLconst <x.Type> [log64(c/3)] (ADDshiftLL <x.Type> x x [1])))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c / 3))
			v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v1.AuxInt = int64ToAuxInt(1)
			v1.AddArg2(x, x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)
	// result: (MOVWUreg (SLLconst <x.Type> [log64(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c / 5))
			v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v1.AuxInt = int64ToAuxInt(2)
			v1.AddArg2(x, x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)
	// result: (MOVWUreg (SLLconst <x.Type> [log64(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c / 7))
			v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v1.AuxInt = int64ToAuxInt(3)
			v2 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
			v2.AddArg(x)
			v1.AddArg2(v2, x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MULW x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)
	// result: (MOVWUreg (SLLconst <x.Type> [log64(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
				continue
			}
			v.reset(OpARM64MOVWUreg)
			v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
			v0.AuxInt = int64ToAuxInt(log64(c / 9))
			v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
			v1.AuxInt = int64ToAuxInt(3)
			v1.AddArg2(x, x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (MULW (MOVDconst [c]) (MOVDconst [d]))
	// result: (MOVDconst [int64(uint32(c*d))])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64MOVDconst)
			v.AuxInt = int64ToAuxInt(int64(uint32(c * d)))
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64MVN applies the generated rewrite rules for the
// ARM64 MVN (bitwise NOT) op: it fuses MVN(XOR) into EON, folds MVN of a
// constant, and absorbs a dead single-use shift operand into the combined
// MVNshiftLL/RL/RA/RO forms. It reports whether any rule fired.
func rewriteValueARM64_OpARM64MVN(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MVN (XOR x y))
	// result: (EON x y)
	for {
		if v_0.Op != OpARM64XOR {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64EON)
		v.AddArg2(x, y)
		return true
	}
	// match: (MVN (MOVDconst [c]))
	// result: (MOVDconst [^c])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(^c)
		return true
	}
	// match: (MVN x:(SLLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (MVNshiftLL [c] y)
	for {
		x := v_0
		if x.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		// clobberIfDead gates the fusion on the shift having no other uses.
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64MVNshiftLL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	// match: (MVN x:(SRLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (MVNshiftRL [c] y)
	for {
		x := v_0
		if x.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64MVNshiftRL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	// match: (MVN x:(SRAconst [c] y))
	// cond: clobberIfDead(x)
	// result: (MVNshiftRA [c] y)
	for {
		x := v_0
		if x.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64MVNshiftRA)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	// match: (MVN x:(RORconst [c] y))
	// cond: clobberIfDead(x)
	// result: (MVNshiftRO [c] y)
	for {
		x := v_0
		if x.Op != OpARM64RORconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64MVNshiftRO)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MVNshiftLL constant-folds MVNshiftLL when its
// operand is a MOVDconst: the shift and the NOT are evaluated at compile
// time. It reports whether the rule fired.
func rewriteValueARM64_OpARM64MVNshiftLL(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MVNshiftLL (MOVDconst [c]) [d])
	// result: (MOVDconst [^int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MVNshiftRA constant-folds MVNshiftRA when its
// operand is a MOVDconst, using an arithmetic (sign-extending) right shift.
// It reports whether the rule fired.
func rewriteValueARM64_OpARM64MVNshiftRA(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MVNshiftRA (MOVDconst [c]) [d])
	// result: (MOVDconst [^(c>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(^(c >> uint64(d)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MVNshiftRL constant-folds MVNshiftRL when its
// operand is a MOVDconst, using a logical (zero-filling) right shift.
// It reports whether the rule fired.
func rewriteValueARM64_OpARM64MVNshiftRL(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MVNshiftRL (MOVDconst [c]) [d])
	// result: (MOVDconst [^int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MVNshiftRO constant-folds MVNshiftRO when its
// operand is a MOVDconst, rotating the constant right by d before the NOT.
// It reports whether the rule fired.
func rewriteValueARM64_OpARM64MVNshiftRO(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MVNshiftRO (MOVDconst [c]) [d])
	// result: (MOVDconst [^rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64NEG applies the generated rewrite rules for the
// ARM64 NEG op: it fuses NEG(MUL)/NEG(MULW) into MNEG/MNEGW, cancels double
// negation, folds NEG of a constant, and absorbs a dead single-use shift
// operand into the combined NEGshiftLL/RL/RA forms. It reports whether any
// rule fired.
func rewriteValueARM64_OpARM64NEG(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEG (MUL x y))
	// result: (MNEG x y)
	for {
		if v_0.Op != OpARM64MUL {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64MNEG)
		v.AddArg2(x, y)
		return true
	}
	// match: (NEG (MULW x y))
	// cond: v.Type.Size() <= 4
	// result: (MNEGW x y)
	for {
		if v_0.Op != OpARM64MULW {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		// MNEGW is only valid when the result is at most 32 bits wide.
		if !(v.Type.Size() <= 4) {
			break
		}
		v.reset(OpARM64MNEGW)
		v.AddArg2(x, y)
		return true
	}
	// match: (NEG (NEG x))
	// result: x
	for {
		if v_0.Op != OpARM64NEG {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (NEG (MOVDconst [c]))
	// result: (MOVDconst [-c])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-c)
		return true
	}
	// match: (NEG x:(SLLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (NEGshiftLL [c] y)
	for {
		x := v_0
		if x.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		// clobberIfDead gates the fusion on the shift having no other uses.
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64NEGshiftLL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	// match: (NEG x:(SRLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (NEGshiftRL [c] y)
	for {
		x := v_0
		if x.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64NEGshiftRL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	// match: (NEG x:(SRAconst [c] y))
	// cond: clobberIfDead(x)
	// result: (NEGshiftRA [c] y)
	for {
		x := v_0
		if x.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64NEGshiftRA)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64NEGshiftLL constant-folds NEGshiftLL when its
// operand is a MOVDconst: the shift and the negation are evaluated at
// compile time. It reports whether the rule fired.
func rewriteValueARM64_OpARM64NEGshiftLL(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEGshiftLL (MOVDconst [c]) [d])
	// result: (MOVDconst [-int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-int64(uint64(c) << uint64(d)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64NEGshiftRA constant-folds NEGshiftRA when its
// operand is a MOVDconst, using an arithmetic (sign-extending) right shift.
// It reports whether the rule fired.
func rewriteValueARM64_OpARM64NEGshiftRA(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEGshiftRA (MOVDconst [c]) [d])
	// result: (MOVDconst [-(c>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-(c >> uint64(d)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64NEGshiftRL constant-folds NEGshiftRL when its
// operand is a MOVDconst, using a logical (zero-filling) right shift.
// It reports whether the rule fired.
func rewriteValueARM64_OpARM64NEGshiftRL(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEGshiftRL (MOVDconst [c]) [d])
	// result: (MOVDconst [-int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-int64(uint64(c) >> uint64(d)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64NotEqual applies the generated rewrite rules for
// the ARM64 NotEqual op. The rules convert single-use compare-with-zero
// patterns into flag-setting instructions (AND→TST/TSTW/TSTconst,
// ADD→CMN/CMNW/CMNconst, NEG→CMN, MADD/MSUB→CMN/CMP with a MUL),
// fold NotEqual of a known flag constant, and drop InvertFlags (x != y is
// symmetric, so inverted operand order does not change the result).
// It reports whether any rule fired.
func rewriteValueARM64_OpARM64NotEqual(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (NotEqual (CMPconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (NotEqual (TST x y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		// Only fuse when the AND has no other uses; otherwise it must be kept.
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (NotEqual (CMPWconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (NotEqual (TSTWconst [int32(c)] y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (NotEqual (CMPWconst [0] z:(AND x y)))
	// cond: z.Uses == 1
	// result: (NotEqual (TSTW x y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64AND {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (NotEqual (CMPconst [0] x:(ANDconst [c] y)))
	// cond: x.Uses == 1
	// result: (NotEqual (TSTconst [c] y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (NotEqual (CMP x z:(NEG y)))
	// cond: z.Uses == 1
	// result: (NotEqual (CMN x y))
	for {
		if v_0.Op != OpARM64CMP {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		z := v_0.Args[1]
		if z.Op != OpARM64NEG {
			break
		}
		y := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (NotEqual (CMPW x z:(NEG y)))
	// cond: z.Uses == 1
	// result: (NotEqual (CMNW x y))
	for {
		if v_0.Op != OpARM64CMPW {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		z := v_0.Args[1]
		if z.Op != OpARM64NEG {
			break
		}
		y := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (NotEqual (CMPconst [0] x:(ADDconst [c] y)))
	// cond: x.Uses == 1
	// result: (NotEqual (CMNconst [c] y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ADDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(c)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (NotEqual (CMPWconst [0] x:(ADDconst [c] y)))
	// cond: x.Uses == 1
	// result: (NotEqual (CMNWconst [int32(c)] y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpARM64ADDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		y := x.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (NotEqual (CMPconst [0] z:(ADD x y)))
	// cond: z.Uses == 1
	// result: (NotEqual (CMN x y))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64ADD {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (NotEqual (CMPWconst [0] z:(ADD x y)))
	// cond: z.Uses == 1
	// result: (NotEqual (CMNW x y))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64ADD {
			break
		}
		y := z.Args[1]
		x := z.Args[0]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (NotEqual (CMPconst [0] z:(MADD a x y)))
	// cond: z.Uses == 1
	// result: (NotEqual (CMN a (MUL <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MADD {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (NotEqual (CMPconst [0] z:(MSUB a x y)))
	// cond: z.Uses == 1
	// result: (NotEqual (CMP a (MUL <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MSUB {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (NotEqual (CMPWconst [0] z:(MADDW a x y)))
	// cond: z.Uses == 1
	// result: (NotEqual (CMNW a (MULW <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MADDW {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (NotEqual (CMPWconst [0] z:(MSUBW a x y)))
	// cond: z.Uses == 1
	// result: (NotEqual (CMPW a (MULW <x.Type> x y)))
	for {
		if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
			break
		}
		z := v_0.Args[0]
		if z.Op != OpARM64MSUBW {
			break
		}
		y := z.Args[2]
		a := z.Args[0]
		x := z.Args[1]
		if !(z.Uses == 1) {
			break
		}
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v1.AddArg2(x, y)
		v0.AddArg2(a, v1)
		v.AddArg(v0)
		return true
	}
	// match: (NotEqual (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.ne())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.ne()))
		return true
	}
	// match: (NotEqual (InvertFlags x))
	// result: (NotEqual x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64NotEqual)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64OR applies the generated rewrite rules for the
// ARM64 OR op: constant operands fold into ORconst, (OR x x) simplifies to
// x, OR of an MVN becomes ORN, a dead single-use shift operand is absorbed
// into the combined ORshiftLL/RL/RA/RO forms, and UBFIZ/UBFX paired with a
// masking ANDconst become the BFI/BFXIL bitfield-insert instructions.
// Commutative rules use the inner _i0 loop to try both operand orders.
// It reports whether any rule fired.
func rewriteValueARM64_OpARM64OR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (OR x (MOVDconst [c]))
	// result: (ORconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64ORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (OR x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (OR x (MVN y))
	// result: (ORN x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MVN {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpARM64ORN)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (OR x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORshiftLL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SLLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			// clobberIfDead gates the fusion on the shift having no other uses.
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ORshiftLL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (OR x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORshiftRL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ORshiftRL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (OR x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORshiftRA x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRAconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ORshiftRA)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (OR x0 x1:(RORconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORshiftRO x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64RORconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ORshiftRO)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (OR (UBFIZ [bfc] x) (ANDconst [ac] y))
	// cond: ac == ^((1<<uint(bfc.getARM64BFwidth())-1) << uint(bfc.getARM64BFlsb()))
	// result: (BFI [bfc] y x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64UBFIZ {
				continue
			}
			bfc := auxIntToArm64BitField(v_0.AuxInt)
			x := v_0.Args[0]
			if v_1.Op != OpARM64ANDconst {
				continue
			}
			ac := auxIntToInt64(v_1.AuxInt)
			y := v_1.Args[0]
			// The AND mask must clear exactly the bitfield UBFIZ writes.
			if !(ac == ^((1<<uint(bfc.getARM64BFwidth()) - 1) << uint(bfc.getARM64BFlsb()))) {
				continue
			}
			v.reset(OpARM64BFI)
			v.AuxInt = arm64BitFieldToAuxInt(bfc)
			v.AddArg2(y, x)
			return true
		}
		break
	}
	// match: (OR (UBFX [bfc] x) (ANDconst [ac] y))
	// cond: ac == ^(1<<uint(bfc.getARM64BFwidth())-1)
	// result: (BFXIL [bfc] y x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpARM64UBFX {
				continue
			}
			bfc := auxIntToArm64BitField(v_0.AuxInt)
			x := v_0.Args[0]
			if v_1.Op != OpARM64ANDconst {
				continue
			}
			ac := auxIntToInt64(v_1.AuxInt)
			y := v_1.Args[0]
			// The AND mask must clear exactly the low bits BFXIL overwrites.
			if !(ac == ^(1<<uint(bfc.getARM64BFwidth()) - 1)) {
				continue
			}
			v.reset(OpARM64BFXIL)
			v.AuxInt = arm64BitFieldToAuxInt(bfc)
			v.AddArg2(y, x)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64ORN applies the generated rewrite rules for the
// ARM64 ORN (OR NOT) op: a constant second operand folds into ORconst of its
// complement, (ORN x x) is all-ones, and a dead single-use shift operand is
// absorbed into the combined ORNshiftLL/RL/RA/RO forms. ORN is not
// commutative, so only the second operand is matched against these shapes.
// It reports whether any rule fired.
func rewriteValueARM64_OpARM64ORN(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORN x (MOVDconst [c]))
	// result: (ORconst [^c] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(^c)
		v.AddArg(x)
		return true
	}
	// match: (ORN x x)
	// result: (MOVDconst [-1])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORN x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORNshiftLL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		// clobberIfDead gates the fusion on the shift having no other uses.
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORNshiftLL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (ORN x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORNshiftRL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORNshiftRL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (ORN x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORNshiftRA x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORNshiftRA)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (ORN x0 x1:(RORconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORNshiftRO x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64RORconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORNshiftRO)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ORNshiftLL applies the generated rewrite rules
// for ORNshiftLL: a constant second operand folds into ORconst of the
// complemented shifted value, and (ORNshiftLL (SLLconst x [c]) x [c]) —
// x|^x per bit — is all-ones. It reports whether a rule fired.
func rewriteValueARM64_OpARM64ORNshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORNshiftLL x (MOVDconst [c]) [d])
	// result: (ORconst x [^int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ORNshiftLL (SLLconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ORNshiftRA applies the generated rewrite rules
// for ORNshiftRA: a constant second operand folds into ORconst of the
// complemented arithmetically-shifted value, and
// (ORNshiftRA (SRAconst x [c]) x [c]) is all-ones.
// It reports whether a rule fired.
func rewriteValueARM64_OpARM64ORNshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORNshiftRA x (MOVDconst [c]) [d])
	// result: (ORconst x [^(c>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(^(c >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ORNshiftRA (SRAconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ORNshiftRL applies the generated rewrite rules
// for ORNshiftRL: a constant second operand folds into ORconst of the
// complemented logically-shifted value, and
// (ORNshiftRL (SRLconst x [c]) x [c]) is all-ones.
// It reports whether a rule fired.
func rewriteValueARM64_OpARM64ORNshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORNshiftRL x (MOVDconst [c]) [d])
	// result: (ORconst x [^int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ORNshiftRL (SRLconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ORNshiftRO applies the generated rewrite rules
// for ORNshiftRO: a constant second operand folds into ORconst of the
// complemented rotated value, and (ORNshiftRO (RORconst x [c]) x [c]) is
// all-ones. It reports whether a rule fired.
func rewriteValueARM64_OpARM64ORNshiftRO(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORNshiftRO x (MOVDconst [c]) [d])
	// result: (ORconst x [^rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	// match: (ORNshiftRO (RORconst x [c]) x [c])
	// result: (MOVDconst [-1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ORconst applies the generated rewrite rules for
// ORconst: OR with 0 is the identity, OR with -1 is all-ones, constant
// operands fold, nested ORconsts merge their constants, and an inner
// ANDconst is dropped when the two masks together cover every bit.
// It reports whether any rule fired.
func rewriteValueARM64_OpARM64ORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ORconst [-1] _)
	// result: (MOVDconst [-1])
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORconst [c] (MOVDconst [d]))
	// result: (MOVDconst [c|d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c | d)
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// result: (ORconst [c|d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(c | d)
		v.AddArg(x)
		return true
	}
	// match: (ORconst [c1] (ANDconst [c2] x))
	// cond: c2|c1 == ^0
	// result: (ORconst [c1] x)
	for {
		c1 := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		// Bits cleared by the AND (^c2) are all set again by the OR (c1),
		// so the AND has no observable effect on the result.
		if !(c2|c1 == ^0) {
			break
		}
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(c1)
		v.AddArg(x)
		return true
	}
	return false
}
15032func rewriteValueARM64_OpARM64ORshiftLL(v *Value) bool {
15033	v_1 := v.Args[1]
15034	v_0 := v.Args[0]
15035	b := v.Block
15036	typ := &b.Func.Config.Types
15037	// match: (ORshiftLL (MOVDconst [c]) x [d])
15038	// result: (ORconst [c] (SLLconst <x.Type> x [d]))
15039	for {
15040		d := auxIntToInt64(v.AuxInt)
15041		if v_0.Op != OpARM64MOVDconst {
15042			break
15043		}
15044		c := auxIntToInt64(v_0.AuxInt)
15045		x := v_1
15046		v.reset(OpARM64ORconst)
15047		v.AuxInt = int64ToAuxInt(c)
15048		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
15049		v0.AuxInt = int64ToAuxInt(d)
15050		v0.AddArg(x)
15051		v.AddArg(v0)
15052		return true
15053	}
15054	// match: (ORshiftLL x (MOVDconst [c]) [d])
15055	// result: (ORconst x [int64(uint64(c)<<uint64(d))])
15056	for {
15057		d := auxIntToInt64(v.AuxInt)
15058		x := v_0
15059		if v_1.Op != OpARM64MOVDconst {
15060			break
15061		}
15062		c := auxIntToInt64(v_1.AuxInt)
15063		v.reset(OpARM64ORconst)
15064		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
15065		v.AddArg(x)
15066		return true
15067	}
15068	// match: (ORshiftLL y:(SLLconst x [c]) x [c])
15069	// result: y
15070	for {
15071		c := auxIntToInt64(v.AuxInt)
15072		y := v_0
15073		if y.Op != OpARM64SLLconst || auxIntToInt64(y.AuxInt) != c {
15074			break
15075		}
15076		x := y.Args[0]
15077		if x != v_1 {
15078			break
15079		}
15080		v.copyOf(y)
15081		return true
15082	}
15083	// match: (ORshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x)
15084	// result: (REV16W x)
15085	for {
15086		if v.Type != typ.UInt16 || auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 8) {
15087			break
15088		}
15089		x := v_0.Args[0]
15090		if x != v_1 {
15091			break
15092		}
15093		v.reset(OpARM64REV16W)
15094		v.AddArg(x)
15095		return true
15096	}
15097	// match: (ORshiftLL [8] (UBFX [armBFAuxInt(8, 24)] (ANDconst [c1] x)) (ANDconst [c2] x))
15098	// cond: uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff
15099	// result: (REV16W x)
15100	for {
15101		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 24) {
15102			break
15103		}
15104		v_0_0 := v_0.Args[0]
15105		if v_0_0.Op != OpARM64ANDconst {
15106			break
15107		}
15108		c1 := auxIntToInt64(v_0_0.AuxInt)
15109		x := v_0_0.Args[0]
15110		if v_1.Op != OpARM64ANDconst {
15111			break
15112		}
15113		c2 := auxIntToInt64(v_1.AuxInt)
15114		if x != v_1.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
15115			break
15116		}
15117		v.reset(OpARM64REV16W)
15118		v.AddArg(x)
15119		return true
15120	}
15121	// match: (ORshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x))
15122	// cond: (uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff)
15123	// result: (REV16 x)
15124	for {
15125		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
15126			break
15127		}
15128		v_0_0 := v_0.Args[0]
15129		if v_0_0.Op != OpARM64ANDconst {
15130			break
15131		}
15132		c1 := auxIntToInt64(v_0_0.AuxInt)
15133		x := v_0_0.Args[0]
15134		if v_1.Op != OpARM64ANDconst {
15135			break
15136		}
15137		c2 := auxIntToInt64(v_1.AuxInt)
15138		if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
15139			break
15140		}
15141		v.reset(OpARM64REV16)
15142		v.AddArg(x)
15143		return true
15144	}
15145	// match: (ORshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x))
15146	// cond: (uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff)
15147	// result: (REV16 (ANDconst <x.Type> [0xffffffff] x))
15148	for {
15149		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
15150			break
15151		}
15152		v_0_0 := v_0.Args[0]
15153		if v_0_0.Op != OpARM64ANDconst {
15154			break
15155		}
15156		c1 := auxIntToInt64(v_0_0.AuxInt)
15157		x := v_0_0.Args[0]
15158		if v_1.Op != OpARM64ANDconst {
15159			break
15160		}
15161		c2 := auxIntToInt64(v_1.AuxInt)
15162		if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
15163			break
15164		}
15165		v.reset(OpARM64REV16)
15166		v0 := b.NewValue0(v.Pos, OpARM64ANDconst, x.Type)
15167		v0.AuxInt = int64ToAuxInt(0xffffffff)
15168		v0.AddArg(x)
15169		v.AddArg(v0)
15170		return true
15171	}
15172	// match: ( ORshiftLL [c] (SRLconst x [64-c]) x2)
15173	// result: (EXTRconst [64-c] x2 x)
15174	for {
15175		c := auxIntToInt64(v.AuxInt)
15176		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
15177			break
15178		}
15179		x := v_0.Args[0]
15180		x2 := v_1
15181		v.reset(OpARM64EXTRconst)
15182		v.AuxInt = int64ToAuxInt(64 - c)
15183		v.AddArg2(x2, x)
15184		return true
15185	}
15186	// match: ( ORshiftLL <t> [c] (UBFX [bfc] x) x2)
15187	// cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)
15188	// result: (EXTRWconst [32-c] x2 x)
15189	for {
15190		t := v.Type
15191		c := auxIntToInt64(v.AuxInt)
15192		if v_0.Op != OpARM64UBFX {
15193			break
15194		}
15195		bfc := auxIntToArm64BitField(v_0.AuxInt)
15196		x := v_0.Args[0]
15197		x2 := v_1
15198		if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
15199			break
15200		}
15201		v.reset(OpARM64EXTRWconst)
15202		v.AuxInt = int64ToAuxInt(32 - c)
15203		v.AddArg2(x2, x)
15204		return true
15205	}
15206	// match: (ORshiftLL [sc] (UBFX [bfc] x) (SRLconst [sc] y))
15207	// cond: sc == bfc.getARM64BFwidth()
15208	// result: (BFXIL [bfc] y x)
15209	for {
15210		sc := auxIntToInt64(v.AuxInt)
15211		if v_0.Op != OpARM64UBFX {
15212			break
15213		}
15214		bfc := auxIntToArm64BitField(v_0.AuxInt)
15215		x := v_0.Args[0]
15216		if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != sc {
15217			break
15218		}
15219		y := v_1.Args[0]
15220		if !(sc == bfc.getARM64BFwidth()) {
15221			break
15222		}
15223		v.reset(OpARM64BFXIL)
15224		v.AuxInt = arm64BitFieldToAuxInt(bfc)
15225		v.AddArg2(y, x)
15226		return true
15227	}
15228	return false
15229}
// rewriteValueARM64_OpARM64ORshiftRA rewrites v (an OpARM64ORshiftRA value)
// using the rules generated from _gen/ARM64.rules. Each for-loop below is a
// single rewrite attempt that executes at most once: "break" abandons that
// rule and falls through to the next. Reports whether v was rewritten.
// NOTE: generated code — edit _gen/ARM64.rules, not this function.
func rewriteValueARM64_OpARM64ORshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block // needed to allocate new values (b.NewValue0) in rule results
	// match: (ORshiftRA (MOVDconst [c]) x [d])
	// result: (ORconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ORshiftRA x (MOVDconst [c]) [d])
	// result: (ORconst x [c>>uint64(d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ORshiftRA y:(SRAconst x [c]) x [c])
	// result: y
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if y.Op != OpARM64SRAconst || auxIntToInt64(y.AuxInt) != c {
			break
		}
		x := y.Args[0]
		if x != v_1 {
			break
		}
		v.copyOf(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ORshiftRL rewrites v (an OpARM64ORshiftRL value)
// using the rules generated from _gen/ARM64.rules, including the bitfield
// fusions into BFI/BFXIL. Each for-loop is a single rewrite attempt that
// executes at most once: "break" abandons that rule and falls through to the
// next. Reports whether v was rewritten.
// NOTE: generated code — edit _gen/ARM64.rules, not this function.
func rewriteValueARM64_OpARM64ORshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block // needed to allocate new values (b.NewValue0) in rule results
	// match: (ORshiftRL (MOVDconst [c]) x [d])
	// result: (ORconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ORshiftRL x (MOVDconst [c]) [d])
	// result: (ORconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (ORshiftRL y:(SRLconst x [c]) x [c])
	// result: y
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if y.Op != OpARM64SRLconst || auxIntToInt64(y.AuxInt) != c {
			break
		}
		x := y.Args[0]
		if x != v_1 {
			break
		}
		v.copyOf(y)
		return true
	}
	// match: (ORshiftRL [rc] (ANDconst [ac] x) (SLLconst [lc] y))
	// cond: lc > rc && ac == ^((1<<uint(64-lc)-1) << uint64(lc-rc))
	// result: (BFI [armBFAuxInt(lc-rc, 64-lc)] x y)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		ac := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_1.AuxInt)
		y := v_1.Args[0]
		if !(lc > rc && ac == ^((1<<uint(64-lc)-1)<<uint64(lc-rc))) {
			break
		}
		v.reset(OpARM64BFI)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc-rc, 64-lc))
		v.AddArg2(x, y)
		return true
	}
	// match: (ORshiftRL [rc] (ANDconst [ac] y) (SLLconst [lc] x))
	// cond: lc < rc && ac == ^((1<<uint(64-rc)-1))
	// result: (BFXIL [armBFAuxInt(rc-lc, 64-rc)] y x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		ac := auxIntToInt64(v_0.AuxInt)
		y := v_0.Args[0]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_1.AuxInt)
		x := v_1.Args[0]
		if !(lc < rc && ac == ^(1<<uint(64-rc)-1)) {
			break
		}
		v.reset(OpARM64BFXIL)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc-lc, 64-rc))
		v.AddArg2(y, x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ORshiftRO rewrites v (an OpARM64ORshiftRO value,
// OR with a rotate-right second operand) using the rules generated from
// _gen/ARM64.rules. Each for-loop is a single rewrite attempt that executes
// at most once: "break" abandons that rule and falls through to the next.
// Reports whether v was rewritten.
// NOTE: generated code — edit _gen/ARM64.rules, not this function.
func rewriteValueARM64_OpARM64ORshiftRO(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block // needed to allocate new values (b.NewValue0) in rule results
	// match: (ORshiftRO (MOVDconst [c]) x [d])
	// result: (ORconst [c] (RORconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ORshiftRO x (MOVDconst [c]) [d])
	// result: (ORconst x [rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64ORconst)
		v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	// match: (ORshiftRO y:(RORconst x [c]) x [c])
	// result: y
	for {
		c := auxIntToInt64(v.AuxInt)
		y := v_0
		if y.Op != OpARM64RORconst || auxIntToInt64(y.AuxInt) != c {
			break
		}
		x := y.Args[0]
		if x != v_1 {
			break
		}
		v.copyOf(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64REV rewrites v (an OpARM64REV value) using the
// rules generated from _gen/ARM64.rules: two nested byte-reversals cancel,
// so (REV (REV p)) collapses to p. Reports whether v was rewritten.
// NOTE: generated code — edit _gen/ARM64.rules, not this function.
func rewriteValueARM64_OpARM64REV(v *Value) bool {
	v_0 := v.Args[0]
	// match: (REV (REV p))
	// result: p
	for {
		if v_0.Op != OpARM64REV {
			break
		}
		p := v_0.Args[0]
		v.copyOf(p)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64REVW rewrites v (an OpARM64REVW value) using the
// rules generated from _gen/ARM64.rules: two nested 32-bit byte-reversals
// cancel, so (REVW (REVW p)) collapses to p. Reports whether v was rewritten.
// NOTE: generated code — edit _gen/ARM64.rules, not this function.
func rewriteValueARM64_OpARM64REVW(v *Value) bool {
	v_0 := v.Args[0]
	// match: (REVW (REVW p))
	// result: p
	for {
		if v_0.Op != OpARM64REVW {
			break
		}
		p := v_0.Args[0]
		v.copyOf(p)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ROR rewrites v (an OpARM64ROR value) using the
// rules generated from _gen/ARM64.rules: a rotate by a constant amount
// becomes RORconst with the amount masked to 6 bits (c&63). Reports whether
// v was rewritten.
// NOTE: generated code — edit _gen/ARM64.rules, not this function.
func rewriteValueARM64_OpARM64ROR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROR x (MOVDconst [c]))
	// result: (RORconst x [c&63])
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64RORconst)
		v.AuxInt = int64ToAuxInt(c & 63)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64RORW rewrites v (an OpARM64RORW value) using the
// rules generated from _gen/ARM64.rules: a 32-bit rotate by a constant
// amount becomes RORWconst with the amount masked to 5 bits (c&31). Reports
// whether v was rewritten.
// NOTE: generated code — edit _gen/ARM64.rules, not this function.
func rewriteValueARM64_OpARM64RORW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (RORW x (MOVDconst [c]))
	// result: (RORWconst x [c&31])
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64RORWconst)
		v.AuxInt = int64ToAuxInt(c & 31)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SBCSflags rewrites v (an OpARM64SBCSflags value,
// subtract-with-carry that also sets flags) using the rules generated from
// _gen/ARM64.rules. Each for-loop is a single rewrite attempt that executes
// at most once: "break" abandons that rule and falls through to the next.
// Reports whether v was rewritten.
// NOTE: generated code — edit _gen/ARM64.rules, not this function.
func rewriteValueARM64_OpARM64SBCSflags(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types // used below to check the NEG/NGCzerocarry value types
	// match: (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags (NEG <typ.UInt64> (NGCzerocarry <typ.UInt64> bo)))))
	// result: (SBCSflags x y bo)
	for {
		x := v_0
		y := v_1
		if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
			break
		}
		v_2_0 := v_2.Args[0]
		if v_2_0.Op != OpARM64NEGSflags {
			break
		}
		v_2_0_0 := v_2_0.Args[0]
		if v_2_0_0.Op != OpARM64NEG || v_2_0_0.Type != typ.UInt64 {
			break
		}
		v_2_0_0_0 := v_2_0_0.Args[0]
		if v_2_0_0_0.Op != OpARM64NGCzerocarry || v_2_0_0_0.Type != typ.UInt64 {
			break
		}
		bo := v_2_0_0_0.Args[0]
		v.reset(OpARM64SBCSflags)
		v.AddArg3(x, y, bo)
		return true
	}
	// match: (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags (MOVDconst [0]))))
	// result: (SUBSflags x y)
	for {
		x := v_0
		y := v_1
		if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
			break
		}
		v_2_0 := v_2.Args[0]
		if v_2_0.Op != OpARM64NEGSflags {
			break
		}
		v_2_0_0 := v_2_0.Args[0]
		if v_2_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_2_0_0.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64SUBSflags)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SLL rewrites v (an OpARM64SLL value) using the
// rules generated from _gen/ARM64.rules: a shift by a constant becomes
// SLLconst (amount masked with 63), and a shift amount already masked with
// ANDconst [63] drops the redundant mask. Each for-loop is a single rewrite
// attempt that executes at most once. Reports whether v was rewritten.
// NOTE: generated code — edit _gen/ARM64.rules, not this function.
func rewriteValueARM64_OpARM64SLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLL x (MOVDconst [c]))
	// result: (SLLconst x [c&63])
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64SLLconst)
		v.AuxInt = int64ToAuxInt(c & 63)
		v.AddArg(x)
		return true
	}
	// match: (SLL x (ANDconst [63] y))
	// result: (SLL x y)
	for {
		x := v_0
		if v_1.Op != OpARM64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpARM64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SLLconst rewrites v (an OpARM64SLLconst value)
// using the rules generated from _gen/ARM64.rules: constant folding, fusing
// a left shift of a sign/zero-extension into SBFIZ/UBFIZ bitfield-insert
// ops, and related bitfield combinations. Each for-loop is a single rewrite
// attempt that executes at most once: "break" abandons that rule and falls
// through to the next. Reports whether v was rewritten.
// NOTE: generated code — edit _gen/ARM64.rules, not this function.
func rewriteValueARM64_OpARM64SLLconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLconst [c] (MOVDconst [d]))
	// result: (MOVDconst [d<<uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(d << uint64(c))
		return true
	}
	// match: (SLLconst [c] (SRLconst [c] x))
	// cond: 0 < c && c < 64
	// result: (ANDconst [^(1<<uint(c)-1)] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if !(0 < c && c < 64) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(^(1<<uint(c) - 1))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [lc] (MOVWreg x))
	// result: (SBFIZ [armBFAuxInt(lc, min(32, 64-lc))] x)
	for {
		lc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVWreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(32, 64-lc)))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [lc] (MOVHreg x))
	// result: (SBFIZ [armBFAuxInt(lc, min(16, 64-lc))] x)
	for {
		lc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVHreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(16, 64-lc)))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [lc] (MOVBreg x))
	// result: (SBFIZ [armBFAuxInt(lc, min(8, 64-lc))] x)
	for {
		lc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVBreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(8, 64-lc)))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [lc] (MOVWUreg x))
	// result: (UBFIZ [armBFAuxInt(lc, min(32, 64-lc))] x)
	for {
		lc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(32, 64-lc)))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [lc] (MOVHUreg x))
	// result: (UBFIZ [armBFAuxInt(lc, min(16, 64-lc))] x)
	for {
		lc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(16, 64-lc)))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [lc] (MOVBUreg x))
	// result: (UBFIZ [armBFAuxInt(lc, min(8, 64-lc))] x)
	for {
		lc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(8, 64-lc)))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [sc] (ANDconst [ac] x))
	// cond: isARM64BFMask(sc, ac, 0)
	// result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(ac, 0))] x)
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		ac := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, ac, 0)) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, 0)))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [sc] (UBFIZ [bfc] x))
	// cond: sc+bfc.getARM64BFwidth()+bfc.getARM64BFlsb() < 64
	// result: (UBFIZ [armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth())] x)
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64UBFIZ {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc+bfc.getARM64BFwidth()+bfc.getARM64BFlsb() < 64) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SRA rewrites v (an OpARM64SRA value) using the
// rules generated from _gen/ARM64.rules: a shift by a constant becomes
// SRAconst (amount masked with 63), and a shift amount already masked with
// ANDconst [63] drops the redundant mask. Each for-loop is a single rewrite
// attempt that executes at most once. Reports whether v was rewritten.
// NOTE: generated code — edit _gen/ARM64.rules, not this function.
func rewriteValueARM64_OpARM64SRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRA x (MOVDconst [c]))
	// result: (SRAconst x [c&63])
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64SRAconst)
		v.AuxInt = int64ToAuxInt(c & 63)
		v.AddArg(x)
		return true
	}
	// match: (SRA x (ANDconst [63] y))
	// result: (SRA x y)
	for {
		x := v_0
		if v_1.Op != OpARM64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpARM64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SRAconst rewrites v (an OpARM64SRAconst value)
// using the rules generated from _gen/ARM64.rules: constant folding, and
// fusing an arithmetic right shift of a shift/extension into the SBFIZ/SBFX
// signed-bitfield ops. Each for-loop is a single rewrite attempt that
// executes at most once: "break" abandons that rule and falls through to
// the next. Reports whether v was rewritten.
// NOTE: generated code — edit _gen/ARM64.rules, not this function.
func rewriteValueARM64_OpARM64SRAconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRAconst [c] (MOVDconst [d]))
	// result: (MOVDconst [d>>uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(d >> uint64(c))
		return true
	}
	// match: (SRAconst [rc] (SLLconst [lc] x))
	// cond: lc > rc
	// result: (SBFIZ [armBFAuxInt(lc-rc, 64-lc)] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc > rc) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc-rc, 64-lc))
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [rc] (SLLconst [lc] x))
	// cond: lc <= rc
	// result: (SBFX [armBFAuxInt(rc-lc, 64-rc)] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc <= rc) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc-lc, 64-rc))
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [rc] (MOVWreg x))
	// cond: rc < 32
	// result: (SBFX [armBFAuxInt(rc, 32-rc)] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVWreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 32) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 32-rc))
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [rc] (MOVHreg x))
	// cond: rc < 16
	// result: (SBFX [armBFAuxInt(rc, 16-rc)] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVHreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 16) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 16-rc))
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [rc] (MOVBreg x))
	// cond: rc < 8
	// result: (SBFX [armBFAuxInt(rc, 8-rc)] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVBreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 8) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 8-rc))
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [sc] (SBFIZ [bfc] x))
	// cond: sc < bfc.getARM64BFlsb()
	// result: (SBFIZ [armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth())] x)
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SBFIZ {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc < bfc.getARM64BFlsb()) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth()))
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [sc] (SBFIZ [bfc] x))
	// cond: sc >= bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()
	// result: (SBFX [armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc)] x)
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SBFIZ {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc >= bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SRL rewrites v (an OpARM64SRL value) using the
// rules generated from _gen/ARM64.rules: a shift by a constant becomes
// SRLconst (amount masked with 63), and a shift amount already masked with
// ANDconst [63] drops the redundant mask. Each for-loop is a single rewrite
// attempt that executes at most once. Reports whether v was rewritten.
// NOTE: generated code — edit _gen/ARM64.rules, not this function.
func rewriteValueARM64_OpARM64SRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRL x (MOVDconst [c]))
	// result: (SRLconst x [c&63])
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64SRLconst)
		v.AuxInt = int64ToAuxInt(c & 63)
		v.AddArg(x)
		return true
	}
	// match: (SRL x (ANDconst [63] y))
	// result: (SRL x y)
	for {
		x := v_0
		if v_1.Op != OpARM64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpARM64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SRLconst rewrites v (an OpARM64SRLconst value)
// using the rules generated from _gen/ARM64.rules: constant folding,
// collapsing shifts of zero-extensions to constants or UBFX extracts, and
// combining with ANDconst/UBFIZ/UBFX into the unsigned-bitfield ops. Each
// for-loop is a single rewrite attempt that executes at most once: "break"
// abandons that rule and falls through to the next. Reports whether v was
// rewritten.
// NOTE: generated code — edit _gen/ARM64.rules, not this function.
func rewriteValueARM64_OpARM64SRLconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRLconst [c] (MOVDconst [d]))
	// result: (MOVDconst [int64(uint64(d)>>uint64(c))])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
		return true
	}
	// match: (SRLconst [c] (SLLconst [c] x))
	// cond: 0 < c && c < 64
	// result: (ANDconst [1<<uint(64-c)-1] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if !(0 < c && c < 64) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(1<<uint(64-c) - 1)
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [rc] (MOVWUreg x))
	// cond: rc >= 32
	// result: (MOVDconst [0])
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		if !(rc >= 32) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLconst [rc] (MOVHUreg x))
	// cond: rc >= 16
	// result: (MOVDconst [0])
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		if !(rc >= 16) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLconst [rc] (MOVBUreg x))
	// cond: rc >= 8
	// result: (MOVDconst [0])
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		if !(rc >= 8) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLconst [rc] (SLLconst [lc] x))
	// cond: lc > rc
	// result: (UBFIZ [armBFAuxInt(lc-rc, 64-lc)] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc > rc) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc-rc, 64-lc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [rc] (SLLconst [lc] x))
	// cond: lc < rc
	// result: (UBFX [armBFAuxInt(rc-lc, 64-rc)] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc < rc) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc-lc, 64-rc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [rc] (MOVWUreg x))
	// cond: rc < 32
	// result: (UBFX [armBFAuxInt(rc, 32-rc)] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 32) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 32-rc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [rc] (MOVHUreg x))
	// cond: rc < 16
	// result: (UBFX [armBFAuxInt(rc, 16-rc)] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 16) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 16-rc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [rc] (MOVBUreg x))
	// cond: rc < 8
	// result: (UBFX [armBFAuxInt(rc, 8-rc)] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 8) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 8-rc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (ANDconst [ac] x))
	// cond: isARM64BFMask(sc, ac, sc)
	// result: (UBFX [armBFAuxInt(sc, arm64BFWidth(ac, sc))] x)
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		ac := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, ac, sc)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, sc)))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (UBFX [bfc] x))
	// cond: sc < bfc.getARM64BFwidth()
	// result: (UBFX [armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()-sc)] x)
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc < bfc.getARM64BFwidth()) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()-sc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (UBFIZ [bfc] x))
	// cond: sc == bfc.getARM64BFlsb()
	// result: (ANDconst [1<<uint(bfc.getARM64BFwidth())-1] x)
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64UBFIZ {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc == bfc.getARM64BFlsb()) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(1<<uint(bfc.getARM64BFwidth()) - 1)
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (UBFIZ [bfc] x))
	// cond: sc < bfc.getARM64BFlsb()
	// result: (UBFIZ [armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth())] x)
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64UBFIZ {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc < bfc.getARM64BFlsb()) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth()))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (UBFIZ [bfc] x))
	// cond: sc > bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()
	// result: (UBFX [armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc)] x)
	for {
		sc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64UBFIZ {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc > bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64STP rewrites v (an OpARM64STP store-pair value)
// using the rules generated from _gen/ARM64.rules: folding an ADDconst or
// MOVDaddr offset into the store's AuxInt/Aux, and turning a pair of zero
// stores into MOVQstorezero. Each for-loop is a single rewrite attempt that
// executes at most once: "break" abandons that rule and falls through to
// the next. Reports whether v was rewritten.
// NOTE: generated code — edit _gen/ARM64.rules, not this function.
func rewriteValueARM64_OpARM64STP(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config // consulted for the Flag_dynlink restriction on SB-relative folds
	// match: (STP [off1] {sym} (ADDconst [off2] ptr) val1 val2 mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (STP [off1+int32(off2)] {sym} ptr val1 val2 mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val1 := v_1
		val2 := v_2
		mem := v_3
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64STP)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg4(ptr, val1, val2, mem)
		return true
	}
	// match: (STP [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val1 val2 mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (STP [off1+off2] {mergeSym(sym1,sym2)} ptr val1 val2 mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val1 := v_1
		val2 := v_2
		mem := v_3
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpARM64STP)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg4(ptr, val1, val2, mem)
		return true
	}
	// match: (STP [off] {sym} ptr (MOVDconst [0]) (MOVDconst [0]) mem)
	// result: (MOVQstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 || v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SUB applies rewrite rules to an ARM64 SUB value:
// constant folding into SUBconst, fusing a single-use multiply into
// MSUB/MADD (and their 32-bit W forms), x-x => 0, re-association of nested
// SUBs, and folding a shifted operand into the SUBshift* forms.
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValueARM64_OpARM64SUB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (SUB x (MOVDconst [c]))
	// result: (SUBconst [c] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64SUBconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// Fuse a multiply whose only use is this subtract into a single MSUB.
	// match: (SUB a l:(MUL x y))
	// cond: l.Uses==1 && clobber(l)
	// result: (MSUB a x y)
	for {
		a := v_0
		l := v_1
		if l.Op != OpARM64MUL {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MSUB)
		v.AddArg3(a, x, y)
		return true
	}
	// a - (-(x*y)) = a + x*y, fused as MADD.
	// match: (SUB a l:(MNEG x y))
	// cond: l.Uses==1 && clobber(l)
	// result: (MADD a x y)
	for {
		a := v_0
		l := v_1
		if l.Op != OpARM64MNEG {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MADD)
		v.AddArg3(a, x, y)
		return true
	}
	// 32-bit variant: only valid when the result is at most 4 bytes wide.
	// match: (SUB a l:(MULW x y))
	// cond: v.Type.Size() <= 4 && l.Uses==1 && clobber(l)
	// result: (MSUBW a x y)
	for {
		a := v_0
		l := v_1
		if l.Op != OpARM64MULW {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(v.Type.Size() <= 4 && l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MSUBW)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (SUB a l:(MNEGW x y))
	// cond: v.Type.Size() <= 4 && l.Uses==1 && clobber(l)
	// result: (MADDW a x y)
	for {
		a := v_0
		l := v_1
		if l.Op != OpARM64MNEGW {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(v.Type.Size() <= 4 && l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MADDW)
		v.AddArg3(a, x, y)
		return true
	}
	// match: (SUB x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// Re-associate: x - (y - z) = (x + z) - y.
	// match: (SUB x (SUB y z))
	// result: (SUB (ADD <v.Type> x z) y)
	for {
		x := v_0
		if v_1.Op != OpARM64SUB {
			break
		}
		z := v_1.Args[1]
		y := v_1.Args[0]
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, v.Type)
		v0.AddArg2(x, z)
		v.AddArg2(v0, y)
		return true
	}
	// Re-associate: (x - y) - z = x - (y + z).
	// match: (SUB (SUB x y) z)
	// result: (SUB x (ADD <y.Type> y z))
	for {
		if v_0.Op != OpARM64SUB {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		z := v_1
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, y.Type)
		v0.AddArg2(y, z)
		v.AddArg2(x, v0)
		return true
	}
	// Fold a dead shifted operand into the combined SUB+shift instruction.
	// match: (SUB x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (SUBshiftLL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (SUB x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (SUBshiftRL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64SUBshiftRL)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	// match: (SUB x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (SUBshiftRA x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := auxIntToInt64(x1.AuxInt)
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64SUBshiftRA)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x0, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SUBconst applies rewrite rules to an ARM64
// SUBconst value: the identity x-0 => x, full constant folding, and
// collapsing chained SUBconst/ADDconst into a single ADDconst.
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValueARM64_OpARM64SUBconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SUBconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (SUBconst [c] (MOVDconst [d]))
	// result: (MOVDconst [d-c])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(d - c)
		return true
	}
	// (x - d) - c = x + (-c - d)
	// match: (SUBconst [c] (SUBconst [d] x))
	// result: (ADDconst [-c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SUBconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(-c - d)
		v.AddArg(x)
		return true
	}
	// (x + d) - c = x + (d - c)
	// match: (SUBconst [c] (ADDconst [d] x))
	// result: (ADDconst [-c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ADDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(-c + d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SUBshiftLL applies rewrite rules to an ARM64
// SUBshiftLL value (x - (arg1 << d)): folding a constant shifted operand
// into SUBconst, and recognizing (x<<c) - (x<<c) = 0.
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValueARM64_OpARM64SUBshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBshiftLL x (MOVDconst [c]) [d])
	// result: (SUBconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64SUBconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (SUBshiftLL (SLLconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SUBshiftRA applies rewrite rules to an ARM64
// SUBshiftRA value (x - (arg1 >> d, arithmetic)): folding a constant shifted
// operand into SUBconst, and recognizing (x>>c) - (x>>c) = 0.
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValueARM64_OpARM64SUBshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBshiftRA x (MOVDconst [c]) [d])
	// result: (SUBconst x [c>>uint64(d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64SUBconst)
		v.AuxInt = int64ToAuxInt(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (SUBshiftRA (SRAconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64SUBshiftRL applies rewrite rules to an ARM64
// SUBshiftRL value (x - (arg1 >> d, logical)): folding a constant shifted
// operand into SUBconst, and recognizing (x>>c) - (x>>c) = 0.
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValueARM64_OpARM64SUBshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBshiftRL x (MOVDconst [c]) [d])
	// result: (SUBconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64SUBconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (SUBshiftRL (SRLconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64TST applies rewrite rules to an ARM64 TST
// (test-bits, AND setting flags) value: folding a constant operand into
// TSTconst and folding a dead shifted operand into the TSTshift* forms.
// TST is commutative, so each rule's inner loop tries both argument orders
// by swapping v_0/v_1. Rules are tried in order; the first match rewrites v
// and returns true.
func rewriteValueARM64_OpARM64TST(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (TST x (MOVDconst [c]))
	// result: (TSTconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64TSTconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (TST x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (TSTshiftLL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SLLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64TSTshiftLL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (TST x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (TSTshiftRL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64TSTshiftRL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (TST x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (TSTshiftRA x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRAconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64TSTshiftRA)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (TST x0 x1:(RORconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (TSTshiftRO x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64RORconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64TSTshiftRO)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64TSTW applies rewrite rules to an ARM64 TSTW
// (32-bit test-bits) value: folding a constant operand into TSTWconst.
// TSTW is commutative, so the inner loop tries both argument orders.
// Returns true if v was rewritten.
func rewriteValueARM64_OpARM64TSTW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (TSTW x (MOVDconst [c]))
	// result: (TSTWconst [int32(c)] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64TSTWconst)
			v.AuxInt = int32ToAuxInt(int32(c))
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64TSTWconst applies rewrite rules to an ARM64
// TSTWconst value: when the operand is also a constant, the flags result is
// fully known and becomes a FlagConstant. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64TSTWconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (TSTWconst (MOVDconst [x]) [y])
	// result: (FlagConstant [logicFlags32(int32(x)&y)])
	for {
		y := auxIntToInt32(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(logicFlags32(int32(x) & y))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64TSTconst applies rewrite rules to an ARM64
// TSTconst value: when the operand is also a constant, the flags result is
// fully known and becomes a FlagConstant. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64TSTconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (TSTconst (MOVDconst [x]) [y])
	// result: (FlagConstant [logicFlags64(x&y)])
	for {
		y := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(logicFlags64(x & y))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64TSTshiftLL applies rewrite rules to an ARM64
// TSTshiftLL value (flags of x & (arg1 << d)): when either operand is a
// constant the test is rewritten as a TSTconst. Returns true if v was
// rewritten.
func rewriteValueARM64_OpARM64TSTshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// Constant on the left: move the shift onto the non-constant operand.
	// match: (TSTshiftLL (MOVDconst [c]) x [d])
	// result: (TSTconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64TSTconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// Constant on the right: apply the shift to the constant at compile time.
	// match: (TSTshiftLL x (MOVDconst [c]) [d])
	// result: (TSTconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64TSTconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64TSTshiftRA applies rewrite rules to an ARM64
// TSTshiftRA value (flags of x & (arg1 >> d, arithmetic)): when either
// operand is a constant the test is rewritten as a TSTconst. Returns true if
// v was rewritten.
func rewriteValueARM64_OpARM64TSTshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// Constant on the left: move the shift onto the non-constant operand.
	// match: (TSTshiftRA (MOVDconst [c]) x [d])
	// result: (TSTconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64TSTconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// Constant on the right: apply the shift to the constant at compile time.
	// match: (TSTshiftRA x (MOVDconst [c]) [d])
	// result: (TSTconst x [c>>uint64(d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64TSTconst)
		v.AuxInt = int64ToAuxInt(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64TSTshiftRL applies rewrite rules to an ARM64
// TSTshiftRL value (flags of x & (arg1 >> d, logical)): when either operand
// is a constant the test is rewritten as a TSTconst. Returns true if v was
// rewritten.
func rewriteValueARM64_OpARM64TSTshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// Constant on the left: move the shift onto the non-constant operand.
	// match: (TSTshiftRL (MOVDconst [c]) x [d])
	// result: (TSTconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64TSTconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// Constant on the right: apply the shift to the constant at compile time.
	// match: (TSTshiftRL x (MOVDconst [c]) [d])
	// result: (TSTconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64TSTconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64TSTshiftRO applies rewrite rules to an ARM64
// TSTshiftRO value (flags of x & (arg1 ROR d)): when either operand is a
// constant the test is rewritten as a TSTconst. Returns true if v was
// rewritten.
func rewriteValueARM64_OpARM64TSTshiftRO(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// Constant on the left: move the rotate onto the non-constant operand.
	// match: (TSTshiftRO (MOVDconst [c]) x [d])
	// result: (TSTconst [c] (RORconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64TSTconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// Constant on the right: rotate the constant at compile time.
	// match: (TSTshiftRO x (MOVDconst [c]) [d])
	// result: (TSTconst x [rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64TSTconst)
		v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UBFIZ applies rewrite rules to an ARM64 UBFIZ
// (unsigned bitfield insert in zero) value: a left shift feeding the insert
// is absorbed by raising the lsb and narrowing the width of the bitfield.
// Returns true if v was rewritten.
func rewriteValueARM64_OpARM64UBFIZ(v *Value) bool {
	v_0 := v.Args[0]
	// match: (UBFIZ [bfc] (SLLconst [sc] x))
	// cond: sc < bfc.getARM64BFwidth()
	// result: (UBFIZ [armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()-sc)] x)
	for {
		bfc := auxIntToArm64BitField(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		sc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc < bfc.getARM64BFwidth()) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()-sc))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UBFX applies rewrite rules to an ARM64 UBFX
// (unsigned bitfield extract) value: absorbing an ANDconst mask that the
// extract already implies, folding a preceding SRLconst into the extract's
// lsb, and combining with a preceding SLLconst into an ANDconst, a narrower
// UBFX, or a UBFIZ depending on how the shift amount compares to the
// bitfield's lsb. Rules are tried in order; the first match rewrites v and
// returns true.
func rewriteValueARM64_OpARM64UBFX(v *Value) bool {
	v_0 := v.Args[0]
	// The mask is redundant when the extracted field lies within it.
	// match: (UBFX [bfc] (ANDconst [c] x))
	// cond: isARM64BFMask(0, c, 0) && bfc.getARM64BFlsb() + bfc.getARM64BFwidth() <= arm64BFWidth(c, 0)
	// result: (UBFX [bfc] x)
	for {
		bfc := auxIntToArm64BitField(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(isARM64BFMask(0, c, 0) && bfc.getARM64BFlsb()+bfc.getARM64BFwidth() <= arm64BFWidth(c, 0)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(bfc)
		v.AddArg(x)
		return true
	}
	// A logical right shift just raises the extract's starting bit.
	// match: (UBFX [bfc] (SRLconst [sc] x))
	// cond: sc+bfc.getARM64BFwidth()+bfc.getARM64BFlsb() < 64
	// result: (UBFX [armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth())] x)
	for {
		bfc := auxIntToArm64BitField(v.AuxInt)
		if v_0.Op != OpARM64SRLconst {
			break
		}
		sc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc+bfc.getARM64BFwidth()+bfc.getARM64BFlsb() < 64) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()))
		v.AddArg(x)
		return true
	}
	// Shift exactly cancels the extract offset: only the width mask remains.
	// match: (UBFX [bfc] (SLLconst [sc] x))
	// cond: sc == bfc.getARM64BFlsb()
	// result: (ANDconst [1<<uint(bfc.getARM64BFwidth())-1] x)
	for {
		bfc := auxIntToArm64BitField(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		sc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc == bfc.getARM64BFlsb()) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(1<<uint(bfc.getARM64BFwidth()) - 1)
		v.AddArg(x)
		return true
	}
	// Shift smaller than the lsb: net effect is an extract at lsb-sc.
	// match: (UBFX [bfc] (SLLconst [sc] x))
	// cond: sc < bfc.getARM64BFlsb()
	// result: (UBFX [armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth())] x)
	for {
		bfc := auxIntToArm64BitField(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		sc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc < bfc.getARM64BFlsb()) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth()))
		v.AddArg(x)
		return true
	}
	// Shift past the lsb but inside the field: net effect is an insert.
	// match: (UBFX [bfc] (SLLconst [sc] x))
	// cond: sc > bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()
	// result: (UBFIZ [armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc)] x)
	for {
		bfc := auxIntToArm64BitField(v.AuxInt)
		if v_0.Op != OpARM64SLLconst {
			break
		}
		sc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(sc > bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UDIV applies rewrite rules to an ARM64 UDIV
// (unsigned 64-bit divide) value: x/1 => x, division by a power of two
// becomes a logical right shift, and two-constant division is folded
// (guarded against a zero divisor). Returns true if v was rewritten.
func rewriteValueARM64_OpARM64UDIV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (UDIV x (MOVDconst [1]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (UDIV x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (SRLconst [log64(c)] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64SRLconst)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg(x)
		return true
	}
	// match: (UDIV (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [int64(uint64(c)/uint64(d))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UDIVW applies rewrite rules to an ARM64 UDIVW
// (unsigned 32-bit divide) value: x/1 => zero-extended x, division by a
// power of two becomes a shift of the zero-extended dividend, and
// two-constant division is folded with 32-bit semantics (guarded against a
// zero divisor). Returns true if v was rewritten.
func rewriteValueARM64_OpARM64UDIVW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// Divisor is 1 in its low 32 bits: the quotient is x zero-extended.
	// match: (UDIVW x (MOVDconst [c]))
	// cond: uint32(c)==1
	// result: (MOVWUreg x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint32(c) == 1) {
			break
		}
		v.reset(OpARM64MOVWUreg)
		v.AddArg(x)
		return true
	}
	// match: (UDIVW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c) && is32Bit(c)
	// result: (SRLconst [log64(c)] (MOVWUreg <v.Type> x))
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo64(c) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SRLconst)
		v.AuxInt = int64ToAuxInt(log64(c))
		v0 := b.NewValue0(v.Pos, OpARM64MOVWUreg, v.Type)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (UDIVW (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [int64(uint32(c)/uint32(d))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c) / uint32(d)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UMOD applies rewrite rules to an ARM64 UMOD
// (unsigned 64-bit modulo) value. ARM64 has no modulo instruction, so the
// general case lowers to x - (x/y)*y via MSUB+UDIV; special cases handle a
// divisor of 1, power-of-two divisors (mask), and two-constant folding.
// Returns true if v was rewritten.
func rewriteValueARM64_OpARM64UMOD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (UMOD <typ.UInt64> x y)
	// result: (MSUB <typ.UInt64> x y (UDIV <typ.UInt64> x y))
	for {
		if v.Type != typ.UInt64 {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpARM64MSUB)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpARM64UDIV, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg3(x, y, v0)
		return true
	}
	// match: (UMOD _ (MOVDconst [1]))
	// result: (MOVDconst [0])
	for {
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// Power-of-two divisor: the remainder is just the low bits.
	// match: (UMOD x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c)
	// result: (ANDconst [c-1] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c - 1)
		v.AddArg(x)
		return true
	}
	// match: (UMOD (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [int64(uint64(c)%uint64(d))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UMODW applies rewrite rules to an ARM64 UMODW
// (unsigned 32-bit modulo) value. The general case lowers to
// x - (x/y)*y via MSUBW+UDIVW; special cases handle a divisor whose low 32
// bits are 1, power-of-two divisors (mask), and two-constant folding with
// 32-bit semantics. Returns true if v was rewritten.
func rewriteValueARM64_OpARM64UMODW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (UMODW <typ.UInt32> x y)
	// result: (MSUBW <typ.UInt32> x y (UDIVW <typ.UInt32> x y))
	for {
		if v.Type != typ.UInt32 {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpARM64MSUBW)
		v.Type = typ.UInt32
		v0 := b.NewValue0(v.Pos, OpARM64UDIVW, typ.UInt32)
		v0.AddArg2(x, y)
		v.AddArg3(x, y, v0)
		return true
	}
	// match: (UMODW _ (MOVDconst [c]))
	// cond: uint32(c)==1
	// result: (MOVDconst [0])
	for {
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint32(c) == 1) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// Power-of-two divisor: the remainder is just the low bits.
	// match: (UMODW x (MOVDconst [c]))
	// cond: isPowerOfTwo64(c) && is32Bit(c)
	// result: (ANDconst [c-1] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo64(c) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64ToAuxInt(c - 1)
		v.AddArg(x)
		return true
	}
	// match: (UMODW (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [int64(uint32(c)%uint32(d))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c) % uint32(d)))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XOR applies rewrite rules to an ARM64 XOR value:
// folding a constant operand into XORconst, x^x => 0, fusing a MVN operand
// into EON, and folding a dead shifted operand into the XORshift* forms.
// XOR is commutative, so each commutative rule's inner loop tries both
// argument orders by swapping v_0/v_1. Rules are tried in order; the first
// match rewrites v and returns true.
func rewriteValueARM64_OpARM64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (XOR x (MOVDconst [c]))
	// result: (XORconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			v.reset(OpARM64XORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// x ^ ^y is a single EON instruction.
	// match: (XOR x (MVN y))
	// result: (EON x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MVN {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpARM64EON)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (XOR x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (XORshiftLL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SLLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64XORshiftLL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (XOR x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (XORshiftRL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRLconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64XORshiftRL)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (XOR x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (XORshiftRA x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRAconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64XORshiftRA)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (XOR x0 x1:(RORconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (XORshiftRO x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64RORconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64XORshiftRO)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64XORconst applies rewrite rules to an ARM64
// XORconst value: the identities x^0 => x and x^-1 => MVN x, full constant
// folding, and collapsing chained XORconsts. Rules are tried in order; the
// first match rewrites v and returns true.
func rewriteValueARM64_OpARM64XORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (XORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// XOR with all-ones is bitwise NOT.
	// match: (XORconst [-1] x)
	// result: (MVN x)
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVDconst [d]))
	// result: (MOVDconst [c^d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// result: (XORconst [c^d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64XORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XORshiftLL applies the generated rewrite rules for
// XORshiftLL: constant folding, self-cancellation, byte-reverse (REV16/REV16W)
// pattern recognition, and EXTR/EXTRW formation. It reports whether v was
// rewritten in place.
func rewriteValueARM64_OpARM64XORshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (XORshiftLL (MOVDconst [c]) x [d])
	// result: (XORconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftLL x (MOVDconst [c]) [d])
	// result: (XORconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL (SLLconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (XORshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x)
	// result: (REV16W x)
	for {
		if v.Type != typ.UInt16 || auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 8) {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64REV16W)
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL [8] (UBFX [armBFAuxInt(8, 24)] (ANDconst [c1] x)) (ANDconst [c2] x))
	// cond: uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff
	// result: (REV16W x)
	for {
		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 24) {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64ANDconst {
			break
		}
		c1 := auxIntToInt64(v_0_0.AuxInt)
		x := v_0_0.Args[0]
		if v_1.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_1.AuxInt)
		if x != v_1.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
			break
		}
		v.reset(OpARM64REV16W)
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x))
	// cond: (uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff)
	// result: (REV16 x)
	for {
		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64ANDconst {
			break
		}
		c1 := auxIntToInt64(v_0_0.AuxInt)
		x := v_0_0.Args[0]
		if v_1.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_1.AuxInt)
		if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
			break
		}
		v.reset(OpARM64REV16)
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x))
	// cond: (uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff)
	// result: (REV16 (ANDconst <x.Type> [0xffffffff] x))
	for {
		if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64ANDconst {
			break
		}
		c1 := auxIntToInt64(v_0_0.AuxInt)
		x := v_0_0.Args[0]
		if v_1.Op != OpARM64ANDconst {
			break
		}
		c2 := auxIntToInt64(v_1.AuxInt)
		if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
			break
		}
		v.reset(OpARM64REV16)
		v0 := b.NewValue0(v.Pos, OpARM64ANDconst, x.Type)
		v0.AuxInt = int64ToAuxInt(0xffffffff)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftLL [c] (SRLconst x [64-c]) x2)
	// result: (EXTRconst [64-c] x2 x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
			break
		}
		x := v_0.Args[0]
		x2 := v_1
		v.reset(OpARM64EXTRconst)
		v.AuxInt = int64ToAuxInt(64 - c)
		v.AddArg2(x2, x)
		return true
	}
	// match: (XORshiftLL <t> [c] (UBFX [bfc] x) x2)
	// cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)
	// result: (EXTRWconst [32-c] x2 x)
	for {
		t := v.Type
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := auxIntToArm64BitField(v_0.AuxInt)
		x := v_0.Args[0]
		x2 := v_1
		if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
			break
		}
		v.reset(OpARM64EXTRWconst)
		v.AuxInt = int64ToAuxInt(32 - c)
		v.AddArg2(x2, x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XORshiftRA applies the generated rewrite rules for
// XORshiftRA (constant folding and self-cancellation) and reports whether v
// was rewritten in place.
func rewriteValueARM64_OpARM64XORshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (XORshiftRA (MOVDconst [c]) x [d])
	// result: (XORconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftRA x (MOVDconst [c]) [d])
	// result: (XORconst x [c>>uint64(d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftRA (SRAconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XORshiftRL applies the generated rewrite rules for
// XORshiftRL (constant folding and self-cancellation) and reports whether v
// was rewritten in place.
func rewriteValueARM64_OpARM64XORshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (XORshiftRL (MOVDconst [c]) x [d])
	// result: (XORconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftRL x (MOVDconst [c]) [d])
	// result: (XORconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftRL (SRLconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64XORshiftRO applies the generated rewrite rules for
// XORshiftRO (rotate-right variant: constant folding and self-cancellation)
// and reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64XORshiftRO(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (XORshiftRO (MOVDconst [c]) x [d])
	// result: (XORconst [c] (RORconst <x.Type> x [d]))
	for {
		d := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(c)
		v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
		v0.AuxInt = int64ToAuxInt(d)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftRO x (MOVDconst [c]) [d])
	// result: (XORconst x [rotateRight64(c, d)])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64XORconst)
		v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftRO (RORconst x [c]) x [c])
	// result: (MOVDconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueARM64_OpAddr lowers the generic Addr op to ARM64 MOVDaddr.
// The rule is unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpAddr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Addr {sym} base)
	// result: (MOVDaddr {sym} base)
	for {
		sym := auxToSym(v.Aux)
		base := v_0
		v.reset(OpARM64MOVDaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
}
// rewriteValueARM64_OpAvg64u lowers the generic Avg64u op to an ARM64
// ADD/SRLconst/SUB sequence. The rule is unconditional, so this always
// rewrites and returns true.
func rewriteValueARM64_OpAvg64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Avg64u <t> x y)
	// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, t)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpARM64SUB, t)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg2(v0, y)
		return true
	}
}
// rewriteValueARM64_OpBitLen32 lowers the generic BitLen32 op to
// 32 - CLZW(x) on ARM64. The rule is unconditional, so this always
// rewrites and returns true.
func rewriteValueARM64_OpBitLen32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen32 x)
	// result: (SUB (MOVDconst [32]) (CLZW <typ.Int> x))
	for {
		x := v_0
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(32)
		v1 := b.NewValue0(v.Pos, OpARM64CLZW, typ.Int)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpBitLen64 lowers the generic BitLen64 op to
// 64 - CLZ(x) on ARM64. The rule is unconditional, so this always
// rewrites and returns true.
func rewriteValueARM64_OpBitLen64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen64 x)
	// result: (SUB (MOVDconst [64]) (CLZ <typ.Int> x))
	for {
		x := v_0
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(64)
		v1 := b.NewValue0(v.Pos, OpARM64CLZ, typ.Int)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpBitRev16 lowers the generic BitRev16 op using the
// 64-bit RBIT followed by a right shift of 48 to keep the low 16 bits.
// The rule is unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpBitRev16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitRev16 x)
	// result: (SRLconst [48] (RBIT <typ.UInt64> x))
	for {
		x := v_0
		v.reset(OpARM64SRLconst)
		v.AuxInt = int64ToAuxInt(48)
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpBitRev8 lowers the generic BitRev8 op using the
// 64-bit RBIT followed by a right shift of 56 to keep the low 8 bits.
// The rule is unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpBitRev8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitRev8 x)
	// result: (SRLconst [56] (RBIT <typ.UInt64> x))
	for {
		x := v_0
		v.reset(OpARM64SRLconst)
		v.AuxInt = int64ToAuxInt(56)
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpCondSelect lowers the generic CondSelect op to ARM64
// CSEL: directly on an existing flags-producing boolval when flagArg finds
// one, otherwise by testing bit 0 of the materialized boolean with TSTWconst.
// It reports whether v was rewritten in place.
func rewriteValueARM64_OpCondSelect(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CondSelect x y boolval)
	// cond: flagArg(boolval) != nil
	// result: (CSEL [boolval.Op] x y flagArg(boolval))
	for {
		x := v_0
		y := v_1
		boolval := v_2
		if !(flagArg(boolval) != nil) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(boolval.Op)
		v.AddArg3(x, y, flagArg(boolval))
		return true
	}
	// match: (CondSelect x y boolval)
	// cond: flagArg(boolval) == nil
	// result: (CSEL [OpARM64NotEqual] x y (TSTWconst [1] boolval))
	for {
		x := v_0
		y := v_1
		boolval := v_2
		if !(flagArg(boolval) == nil) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(1)
		v0.AddArg(boolval)
		v.AddArg3(x, y, v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpConst16 lowers the generic Const16 op to a sign-extended
// MOVDconst. The rule is unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt16(v.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueARM64_OpConst32 lowers the generic Const32 op to a sign-extended
// MOVDconst. The rule is unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt32(v.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueARM64_OpConst32F lowers the generic Const32F op to FMOVSconst,
// widening the float32 aux value to float64. The rule is unconditional, so
// this always rewrites and returns true.
func rewriteValueARM64_OpConst32F(v *Value) bool {
	// match: (Const32F [val])
	// result: (FMOVSconst [float64(val)])
	for {
		val := auxIntToFloat32(v.AuxInt)
		v.reset(OpARM64FMOVSconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}
// rewriteValueARM64_OpConst64 lowers the generic Const64 op to MOVDconst.
// The rule is unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpConst64(v *Value) bool {
	// match: (Const64 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt64(v.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueARM64_OpConst64F lowers the generic Const64F op to FMOVDconst.
// The rule is unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpConst64F(v *Value) bool {
	// match: (Const64F [val])
	// result: (FMOVDconst [float64(val)])
	for {
		val := auxIntToFloat64(v.AuxInt)
		v.reset(OpARM64FMOVDconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}
// rewriteValueARM64_OpConst8 lowers the generic Const8 op to a sign-extended
// MOVDconst. The rule is unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueARM64_OpConstBool lowers the generic ConstBool op to a
// MOVDconst of 0 or 1 (via b2i). The rule is unconditional, so this always
// rewrites and returns true.
func rewriteValueARM64_OpConstBool(v *Value) bool {
	// match: (ConstBool [t])
	// result: (MOVDconst [b2i(t)])
	for {
		t := auxIntToBool(v.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(t))
		return true
	}
}
// rewriteValueARM64_OpConstNil lowers the generic ConstNil op to MOVDconst 0.
// The rule is unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVDconst [0])
	for {
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}
// rewriteValueARM64_OpCtz16 lowers the generic Ctz16 op: OR in bit 16 as a
// sentinel so a zero input yields 16, then count leading zeros of the
// bit-reversed word. The rule is unconditional, so this always rewrites and
// returns true.
func rewriteValueARM64_OpCtz16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz16 <t> x)
	// result: (CLZW <t> (RBITW <typ.UInt32> (ORconst <typ.UInt32> [0x10000] x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64CLZW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64RBITW, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpARM64ORconst, typ.UInt32)
		v1.AuxInt = int64ToAuxInt(0x10000)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpCtz32 lowers the generic Ctz32 op to CLZW of the
// bit-reversed word. The rule is unconditional, so this always rewrites and
// returns true.
func rewriteValueARM64_OpCtz32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Ctz32 <t> x)
	// result: (CLZW (RBITW <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64CLZW)
		v0 := b.NewValue0(v.Pos, OpARM64RBITW, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpCtz64 lowers the generic Ctz64 op to CLZ of the
// bit-reversed doubleword. The rule is unconditional, so this always
// rewrites and returns true.
func rewriteValueARM64_OpCtz64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Ctz64 <t> x)
	// result: (CLZ (RBIT <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64CLZ)
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpCtz8 lowers the generic Ctz8 op: OR in bit 8 as a
// sentinel so a zero input yields 8, then count leading zeros of the
// bit-reversed word. The rule is unconditional, so this always rewrites and
// returns true.
func rewriteValueARM64_OpCtz8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz8 <t> x)
	// result: (CLZW <t> (RBITW <typ.UInt32> (ORconst <typ.UInt32> [0x100] x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64CLZW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64RBITW, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpARM64ORconst, typ.UInt32)
		v1.AuxInt = int64ToAuxInt(0x100)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpDiv16 lowers the generic Div16 op (only the
// non-overflow-checked form, auxInt false) to a 32-bit signed divide of the
// sign-extended operands. It reports whether v was rewritten in place.
func rewriteValueARM64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 [false] x y)
	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpARM64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpDiv16u lowers the generic Div16u op to a 32-bit
// unsigned divide of the zero-extended operands. The rule is unconditional,
// so this always rewrites and returns true.
func rewriteValueARM64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (UDIVW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64UDIVW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpDiv32 lowers the generic Div32 op (only the
// non-overflow-checked form, auxInt false) to DIVW. It reports whether v was
// rewritten in place.
func rewriteValueARM64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div32 [false] x y)
	// result: (DIVW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpARM64DIVW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpDiv64 lowers the generic Div64 op (only the
// non-overflow-checked form, auxInt false) to DIV. It reports whether v was
// rewritten in place.
func rewriteValueARM64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div64 [false] x y)
	// result: (DIV x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpARM64DIV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpDiv8 lowers the generic Div8 op to a 32-bit signed
// divide of the sign-extended operands. The rule is unconditional, so this
// always rewrites and returns true.
func rewriteValueARM64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpDiv8u lowers the generic Div8u op to a 32-bit unsigned
// divide of the zero-extended operands. The rule is unconditional, so this
// always rewrites and returns true.
func rewriteValueARM64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (UDIVW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64UDIVW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpEq16 lowers the generic Eq16 op to Equal over a 32-bit
// compare of the zero-extended operands. The rule is unconditional, so this
// always rewrites and returns true.
func rewriteValueARM64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (Equal (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq32 lowers the generic Eq32 op to Equal over a 32-bit
// compare. The rule is unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq32 x y)
	// result: (Equal (CMPW x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq32F lowers the generic Eq32F op to Equal over a
// single-precision float compare. The rule is unconditional, so this always
// rewrites and returns true.
func rewriteValueARM64_OpEq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq32F x y)
	// result: (Equal (FCMPS x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq64 lowers the generic Eq64 op to Equal over a 64-bit
// compare. The rule is unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64 x y)
	// result: (Equal (CMP x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq64F lowers the generic Eq64F op to Equal over a
// double-precision float compare. The rule is unconditional, so this always
// rewrites and returns true.
func rewriteValueARM64_OpEq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64F x y)
	// result: (Equal (FCMPD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEq8 lowers the generic Eq8 op to Equal over a 32-bit
// compare of the zero-extended operands. The rule is unconditional, so this
// always rewrites and returns true.
func rewriteValueARM64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// result: (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpEqB lowers the generic EqB (boolean equality) op to
// 1 XOR (x XOR y). The rule is unconditional, so this always rewrites and
// returns true.
func rewriteValueARM64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// result: (XOR (MOVDconst [1]) (XOR <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpARM64XOR, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpEqPtr lowers the generic EqPtr op to Equal over a
// 64-bit compare. The rule is unconditional, so this always rewrites and
// returns true.
func rewriteValueARM64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (EqPtr x y)
	// result: (Equal (CMP x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpFMA lowers the generic FMA op to FMADDD, reordering the
// operands to the ARM64 (addend, multiplicand, multiplier) argument order.
// The rule is unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpFMA(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMA x y z)
	// result: (FMADDD z x y)
	for {
		x := v_0
		y := v_1
		z := v_2
		v.reset(OpARM64FMADDD)
		v.AddArg3(z, x, y)
		return true
	}
}
// rewriteValueARM64_OpHmul32 lowers the generic Hmul32 op (high 32 bits of a
// signed 32x32 multiply) to a widening MULL followed by an arithmetic shift
// right of 32. The rule is unconditional, so this always rewrites and
// returns true.
func rewriteValueARM64_OpHmul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32 x y)
	// result: (SRAconst (MULL <typ.Int64> x y) [32])
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64SRAconst)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpARM64MULL, typ.Int64)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpHmul32u lowers the generic Hmul32u op (high 32 bits of
// an unsigned 32x32 multiply) to a widening UMULL followed by a shift right
// of 32. The rule is unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpHmul32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32u x y)
	// result: (SRAconst (UMULL <typ.UInt64> x y) [32])
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64SRAconst)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpARM64UMULL, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpIsInBounds lowers the generic IsInBounds op to an
// unsigned less-than over a 64-bit compare of idx against len. The rule is
// unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpIsInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (IsInBounds idx len)
	// result: (LessThanU (CMP idx len))
	for {
		idx := v_0
		len := v_1
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(idx, len)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpIsNonNil lowers the generic IsNonNil op to a
// not-equal test against a compare of the pointer with zero. The rule is
// unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpIsNonNil(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (IsNonNil ptr)
	// result: (NotEqual (CMPconst [0] ptr))
	for {
		ptr := v_0
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(0)
		v0.AddArg(ptr)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpIsSliceInBounds lowers the generic IsSliceInBounds op
// to an unsigned less-or-equal over a 64-bit compare of idx against len.
// The rule is unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpIsSliceInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (IsSliceInBounds idx len)
	// result: (LessEqualU (CMP idx len))
	for {
		idx := v_0
		len := v_1
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(idx, len)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq16 lowers the generic Leq16 op to a signed
// less-or-equal over a 32-bit compare of the sign-extended operands.
// The rule is unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq16U lowers the generic Leq16U op. Special cases
// first: x <= 0 becomes x == 0, and 1 <= x becomes x != 0; otherwise it
// emits an unsigned less-or-equal over a 32-bit compare of the zero-extended
// operands. The final rule is unconditional, so this always rewrites and
// returns true.
func rewriteValueARM64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x zero:(MOVDconst [0]))
	// result: (Eq16 x zero)
	for {
		x := v_0
		zero := v_1
		if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
			break
		}
		v.reset(OpEq16)
		v.AddArg2(x, zero)
		return true
	}
	// match: (Leq16U (MOVDconst [1]) x)
	// result: (Neq16 (MOVDconst [0]) x)
	for {
		if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		x := v_1
		v.reset(OpNeq16)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Leq16U x y)
	// result: (LessEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq32 lowers the generic Leq32 op to a signed
// less-or-equal over a 32-bit compare. The rule is unconditional, so this
// always rewrites and returns true.
func rewriteValueARM64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq32 x y)
	// result: (LessEqual (CMPW x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq32F lowers the generic Leq32F op to a float
// less-or-equal over a single-precision compare. The rule is unconditional,
// so this always rewrites and returns true.
func rewriteValueARM64_OpLeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq32F x y)
	// result: (LessEqualF (FCMPS x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqualF)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLeq32U lowers the generic Leq32U op. Special cases
// first: x <= 0 becomes x == 0, and 1 <= x becomes x != 0; otherwise it
// emits an unsigned less-or-equal over a 32-bit compare. The final rule is
// unconditional, so this always rewrites and returns true.
func rewriteValueARM64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x zero:(MOVDconst [0]))
	// result: (Eq32 x zero)
	for {
		x := v_0
		zero := v_1
		if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
			break
		}
		v.reset(OpEq32)
		v.AddArg2(x, zero)
		return true
	}
	// match: (Leq32U (MOVDconst [1]) x)
	// result: (Neq32 (MOVDconst [0]) x)
	for {
		if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		x := v_1
		v.reset(OpNeq32)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Leq32U x y)
	// result: (LessEqualU (CMPW x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
18575func rewriteValueARM64_OpLeq64(v *Value) bool {
18576	v_1 := v.Args[1]
18577	v_0 := v.Args[0]
18578	b := v.Block
18579	// match: (Leq64 x y)
18580	// result: (LessEqual (CMP x y))
18581	for {
18582		x := v_0
18583		y := v_1
18584		v.reset(OpARM64LessEqual)
18585		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
18586		v0.AddArg2(x, y)
18587		v.AddArg(v0)
18588		return true
18589	}
18590}
18591func rewriteValueARM64_OpLeq64F(v *Value) bool {
18592	v_1 := v.Args[1]
18593	v_0 := v.Args[0]
18594	b := v.Block
18595	// match: (Leq64F x y)
18596	// result: (LessEqualF (FCMPD x y))
18597	for {
18598		x := v_0
18599		y := v_1
18600		v.reset(OpARM64LessEqualF)
18601		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
18602		v0.AddArg2(x, y)
18603		v.AddArg(v0)
18604		return true
18605	}
18606}
18607func rewriteValueARM64_OpLeq64U(v *Value) bool {
18608	v_1 := v.Args[1]
18609	v_0 := v.Args[0]
18610	b := v.Block
18611	typ := &b.Func.Config.Types
18612	// match: (Leq64U x zero:(MOVDconst [0]))
18613	// result: (Eq64 x zero)
18614	for {
18615		x := v_0
18616		zero := v_1
18617		if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
18618			break
18619		}
18620		v.reset(OpEq64)
18621		v.AddArg2(x, zero)
18622		return true
18623	}
18624	// match: (Leq64U (MOVDconst [1]) x)
18625	// result: (Neq64 (MOVDconst [0]) x)
18626	for {
18627		if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 {
18628			break
18629		}
18630		x := v_1
18631		v.reset(OpNeq64)
18632		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
18633		v0.AuxInt = int64ToAuxInt(0)
18634		v.AddArg2(v0, x)
18635		return true
18636	}
18637	// match: (Leq64U x y)
18638	// result: (LessEqualU (CMP x y))
18639	for {
18640		x := v_0
18641		y := v_1
18642		v.reset(OpARM64LessEqualU)
18643		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
18644		v0.AddArg2(x, y)
18645		v.AddArg(v0)
18646		return true
18647	}
18648}
// rewriteValueARM64_OpLeq8 lowers signed 8-bit <= by sign-extending both
// operands to 32 bits and comparing with CMPW + LessEqual.
func rewriteValueARM64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLeq8U lowers unsigned 8-bit <=. Constant forms reduce
// to equality tests (unsigned x <= 0 iff x == 0; 1 <= x iff x != 0); the
// fallback zero-extends both operands to 32 bits for CMPW + LessEqualU.
func rewriteValueARM64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x zero:(MOVDconst [0]))
	// result: (Eq8 x zero)
	for {
		x := v_0
		zero := v_1
		if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
			break
		}
		v.reset(OpEq8)
		v.AddArg2(x, zero)
		return true
	}
	// match: (Leq8U (MOVDconst [1]) x)
	// result: (Neq8 (MOVDconst [0]) x)
	for {
		if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 {
			break
		}
		x := v_1
		v.reset(OpNeq8)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
	// match: (Leq8U x y)
	// result: (LessEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess16 lowers signed 16-bit < by sign-extending both
// operands to 32 bits and comparing with CMPW + LessThan.
func rewriteValueARM64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess16U lowers unsigned 16-bit <. Constant forms
// reduce to equality tests (unsigned 0 < x iff x != 0; x < 1 iff x == 0);
// the fallback zero-extends both operands for CMPW + LessThanU.
func rewriteValueARM64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U zero:(MOVDconst [0]) x)
	// result: (Neq16 zero x)
	for {
		zero := v_0
		if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpNeq16)
		v.AddArg2(zero, x)
		return true
	}
	// match: (Less16U x (MOVDconst [1]))
	// result: (Eq16 x (MOVDconst [0]))
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpEq16)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less16U x y)
	// result: (LessThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess32 lowers signed 32-bit < to CMPW + LessThan.
func rewriteValueARM64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less32 x y)
	// result: (LessThan (CMPW x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess32F lowers 32-bit float < to FCMPS + LessThanF.
func rewriteValueARM64_OpLess32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less32F x y)
	// result: (LessThanF (FCMPS x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThanF)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess32U lowers unsigned 32-bit <. Constant forms
// reduce to equality tests; the fallback emits CMPW + LessThanU.
func rewriteValueARM64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U zero:(MOVDconst [0]) x)
	// result: (Neq32 zero x)
	for {
		zero := v_0
		if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpNeq32)
		v.AddArg2(zero, x)
		return true
	}
	// match: (Less32U x (MOVDconst [1]))
	// result: (Eq32 x (MOVDconst [0]))
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less32U x y)
	// result: (LessThanU (CMPW x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess64 lowers signed 64-bit < to CMP + LessThan.
func rewriteValueARM64_OpLess64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less64 x y)
	// result: (LessThan (CMP x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess64F lowers 64-bit float < to FCMPD + LessThanF.
func rewriteValueARM64_OpLess64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less64F x y)
	// result: (LessThanF (FCMPD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThanF)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess64U lowers unsigned 64-bit <. Constant forms
// reduce to equality tests; the fallback emits CMP + LessThanU.
func rewriteValueARM64_OpLess64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less64U zero:(MOVDconst [0]) x)
	// result: (Neq64 zero x)
	for {
		zero := v_0
		if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpNeq64)
		v.AddArg2(zero, x)
		return true
	}
	// match: (Less64U x (MOVDconst [1]))
	// result: (Eq64 x (MOVDconst [0]))
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpEq64)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less64U x y)
	// result: (LessThanU (CMP x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess8 lowers signed 8-bit < by sign-extending both
// operands to 32 bits and comparing with CMPW + LessThan.
func rewriteValueARM64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpLess8U lowers unsigned 8-bit <. Constant forms reduce
// to equality tests; the fallback zero-extends both operands to 32 bits for
// CMPW + LessThanU.
func rewriteValueARM64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U zero:(MOVDconst [0]) x)
	// result: (Neq8 zero x)
	for {
		zero := v_0
		if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpNeq8)
		v.AddArg2(zero, x)
		return true
	}
	// match: (Less8U x (MOVDconst [1]))
	// result: (Eq8 x (MOVDconst [0]))
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpEq8)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Less8U x y)
	// result: (LessThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpLoad lowers a generic Load to the ARM64 load of the
// matching width, signedness, and register class, dispatching on the
// value's type: byte/half/word/double-word integer loads (signed loads
// sign-extend, unsigned loads zero-extend), MOVDload for 64-bit ints and
// pointers, and FMOVS/FMOVD loads for floats. Returns false if no case
// applies (e.g. an unrecognized type).
func rewriteValueARM64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && t.IsSigned())
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !t.IsSigned())
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && t.IsSigned())
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !t.IsSigned())
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && t.IsSigned())
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !t.IsSigned())
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpARM64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (FMOVSload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (FMOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpLocalAddr lowers LocalAddr to MOVDaddr. When the
// addressed element contains pointers, the base is routed through
// SPanchored with the memory argument so the address stays ordered with
// respect to memory operations; otherwise the memory argument is dropped.
func rewriteValueARM64_OpLocalAddr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (LocalAddr <t> {sym} base mem)
	// cond: t.Elem().HasPointers()
	// result: (MOVDaddr {sym} (SPanchored base mem))
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		mem := v_1
		if !(t.Elem().HasPointers()) {
			break
		}
		v.reset(OpARM64MOVDaddr)
		v.Aux = symToAux(sym)
		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
		v0.AddArg2(base, mem)
		v.AddArg(v0)
		return true
	}
	// match: (LocalAddr <t> {sym} base _)
	// cond: !t.Elem().HasPointers()
	// result: (MOVDaddr {sym} base)
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		if !(!t.Elem().HasPointers()) {
			break
		}
		v.reset(OpARM64MOVDaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
	return false
}
19183func rewriteValueARM64_OpLsh16x16(v *Value) bool {
19184	v_1 := v.Args[1]
19185	v_0 := v.Args[0]
19186	b := v.Block
19187	typ := &b.Func.Config.Types
19188	// match: (Lsh16x16 <t> x y)
19189	// cond: shiftIsBounded(v)
19190	// result: (SLL <t> x y)
19191	for {
19192		t := v.Type
19193		x := v_0
19194		y := v_1
19195		if !(shiftIsBounded(v)) {
19196			break
19197		}
19198		v.reset(OpARM64SLL)
19199		v.Type = t
19200		v.AddArg2(x, y)
19201		return true
19202	}
19203	// match: (Lsh16x16 <t> x y)
19204	// cond: !shiftIsBounded(v)
19205	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
19206	for {
19207		t := v.Type
19208		x := v_0
19209		y := v_1
19210		if !(!shiftIsBounded(v)) {
19211			break
19212		}
19213		v.reset(OpARM64CSEL)
19214		v.AuxInt = opToAuxInt(OpARM64LessThanU)
19215		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
19216		v0.AddArg2(x, y)
19217		v1 := b.NewValue0(v.Pos, OpConst64, t)
19218		v1.AuxInt = int64ToAuxInt(0)
19219		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
19220		v2.AuxInt = int64ToAuxInt(64)
19221		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
19222		v3.AddArg(y)
19223		v2.AddArg(v3)
19224		v.AddArg3(v0, v1, v2)
19225		return true
19226	}
19227	return false
19228}
19229func rewriteValueARM64_OpLsh16x32(v *Value) bool {
19230	v_1 := v.Args[1]
19231	v_0 := v.Args[0]
19232	b := v.Block
19233	typ := &b.Func.Config.Types
19234	// match: (Lsh16x32 <t> x y)
19235	// cond: shiftIsBounded(v)
19236	// result: (SLL <t> x y)
19237	for {
19238		t := v.Type
19239		x := v_0
19240		y := v_1
19241		if !(shiftIsBounded(v)) {
19242			break
19243		}
19244		v.reset(OpARM64SLL)
19245		v.Type = t
19246		v.AddArg2(x, y)
19247		return true
19248	}
19249	// match: (Lsh16x32 <t> x y)
19250	// cond: !shiftIsBounded(v)
19251	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
19252	for {
19253		t := v.Type
19254		x := v_0
19255		y := v_1
19256		if !(!shiftIsBounded(v)) {
19257			break
19258		}
19259		v.reset(OpARM64CSEL)
19260		v.AuxInt = opToAuxInt(OpARM64LessThanU)
19261		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
19262		v0.AddArg2(x, y)
19263		v1 := b.NewValue0(v.Pos, OpConst64, t)
19264		v1.AuxInt = int64ToAuxInt(0)
19265		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
19266		v2.AuxInt = int64ToAuxInt(64)
19267		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
19268		v3.AddArg(y)
19269		v2.AddArg(v3)
19270		v.AddArg3(v0, v1, v2)
19271		return true
19272	}
19273	return false
19274}
19275func rewriteValueARM64_OpLsh16x64(v *Value) bool {
19276	v_1 := v.Args[1]
19277	v_0 := v.Args[0]
19278	b := v.Block
19279	// match: (Lsh16x64 <t> x y)
19280	// cond: shiftIsBounded(v)
19281	// result: (SLL <t> x y)
19282	for {
19283		t := v.Type
19284		x := v_0
19285		y := v_1
19286		if !(shiftIsBounded(v)) {
19287			break
19288		}
19289		v.reset(OpARM64SLL)
19290		v.Type = t
19291		v.AddArg2(x, y)
19292		return true
19293	}
19294	// match: (Lsh16x64 <t> x y)
19295	// cond: !shiftIsBounded(v)
19296	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
19297	for {
19298		t := v.Type
19299		x := v_0
19300		y := v_1
19301		if !(!shiftIsBounded(v)) {
19302			break
19303		}
19304		v.reset(OpARM64CSEL)
19305		v.AuxInt = opToAuxInt(OpARM64LessThanU)
19306		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
19307		v0.AddArg2(x, y)
19308		v1 := b.NewValue0(v.Pos, OpConst64, t)
19309		v1.AuxInt = int64ToAuxInt(0)
19310		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
19311		v2.AuxInt = int64ToAuxInt(64)
19312		v2.AddArg(y)
19313		v.AddArg3(v0, v1, v2)
19314		return true
19315	}
19316	return false
19317}
19318func rewriteValueARM64_OpLsh16x8(v *Value) bool {
19319	v_1 := v.Args[1]
19320	v_0 := v.Args[0]
19321	b := v.Block
19322	typ := &b.Func.Config.Types
19323	// match: (Lsh16x8 <t> x y)
19324	// cond: shiftIsBounded(v)
19325	// result: (SLL <t> x y)
19326	for {
19327		t := v.Type
19328		x := v_0
19329		y := v_1
19330		if !(shiftIsBounded(v)) {
19331			break
19332		}
19333		v.reset(OpARM64SLL)
19334		v.Type = t
19335		v.AddArg2(x, y)
19336		return true
19337	}
19338	// match: (Lsh16x8 <t> x y)
19339	// cond: !shiftIsBounded(v)
19340	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
19341	for {
19342		t := v.Type
19343		x := v_0
19344		y := v_1
19345		if !(!shiftIsBounded(v)) {
19346			break
19347		}
19348		v.reset(OpARM64CSEL)
19349		v.AuxInt = opToAuxInt(OpARM64LessThanU)
19350		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
19351		v0.AddArg2(x, y)
19352		v1 := b.NewValue0(v.Pos, OpConst64, t)
19353		v1.AuxInt = int64ToAuxInt(0)
19354		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
19355		v2.AuxInt = int64ToAuxInt(64)
19356		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
19357		v3.AddArg(y)
19358		v2.AddArg(v3)
19359		v.AddArg3(v0, v1, v2)
19360		return true
19361	}
19362	return false
19363}
19364func rewriteValueARM64_OpLsh32x16(v *Value) bool {
19365	v_1 := v.Args[1]
19366	v_0 := v.Args[0]
19367	b := v.Block
19368	typ := &b.Func.Config.Types
19369	// match: (Lsh32x16 <t> x y)
19370	// cond: shiftIsBounded(v)
19371	// result: (SLL <t> x y)
19372	for {
19373		t := v.Type
19374		x := v_0
19375		y := v_1
19376		if !(shiftIsBounded(v)) {
19377			break
19378		}
19379		v.reset(OpARM64SLL)
19380		v.Type = t
19381		v.AddArg2(x, y)
19382		return true
19383	}
19384	// match: (Lsh32x16 <t> x y)
19385	// cond: !shiftIsBounded(v)
19386	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
19387	for {
19388		t := v.Type
19389		x := v_0
19390		y := v_1
19391		if !(!shiftIsBounded(v)) {
19392			break
19393		}
19394		v.reset(OpARM64CSEL)
19395		v.AuxInt = opToAuxInt(OpARM64LessThanU)
19396		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
19397		v0.AddArg2(x, y)
19398		v1 := b.NewValue0(v.Pos, OpConst64, t)
19399		v1.AuxInt = int64ToAuxInt(0)
19400		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
19401		v2.AuxInt = int64ToAuxInt(64)
19402		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
19403		v3.AddArg(y)
19404		v2.AddArg(v3)
19405		v.AddArg3(v0, v1, v2)
19406		return true
19407	}
19408	return false
19409}
19410func rewriteValueARM64_OpLsh32x32(v *Value) bool {
19411	v_1 := v.Args[1]
19412	v_0 := v.Args[0]
19413	b := v.Block
19414	typ := &b.Func.Config.Types
19415	// match: (Lsh32x32 <t> x y)
19416	// cond: shiftIsBounded(v)
19417	// result: (SLL <t> x y)
19418	for {
19419		t := v.Type
19420		x := v_0
19421		y := v_1
19422		if !(shiftIsBounded(v)) {
19423			break
19424		}
19425		v.reset(OpARM64SLL)
19426		v.Type = t
19427		v.AddArg2(x, y)
19428		return true
19429	}
19430	// match: (Lsh32x32 <t> x y)
19431	// cond: !shiftIsBounded(v)
19432	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
19433	for {
19434		t := v.Type
19435		x := v_0
19436		y := v_1
19437		if !(!shiftIsBounded(v)) {
19438			break
19439		}
19440		v.reset(OpARM64CSEL)
19441		v.AuxInt = opToAuxInt(OpARM64LessThanU)
19442		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
19443		v0.AddArg2(x, y)
19444		v1 := b.NewValue0(v.Pos, OpConst64, t)
19445		v1.AuxInt = int64ToAuxInt(0)
19446		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
19447		v2.AuxInt = int64ToAuxInt(64)
19448		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
19449		v3.AddArg(y)
19450		v2.AddArg(v3)
19451		v.AddArg3(v0, v1, v2)
19452		return true
19453	}
19454	return false
19455}
19456func rewriteValueARM64_OpLsh32x64(v *Value) bool {
19457	v_1 := v.Args[1]
19458	v_0 := v.Args[0]
19459	b := v.Block
19460	// match: (Lsh32x64 <t> x y)
19461	// cond: shiftIsBounded(v)
19462	// result: (SLL <t> x y)
19463	for {
19464		t := v.Type
19465		x := v_0
19466		y := v_1
19467		if !(shiftIsBounded(v)) {
19468			break
19469		}
19470		v.reset(OpARM64SLL)
19471		v.Type = t
19472		v.AddArg2(x, y)
19473		return true
19474	}
19475	// match: (Lsh32x64 <t> x y)
19476	// cond: !shiftIsBounded(v)
19477	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
19478	for {
19479		t := v.Type
19480		x := v_0
19481		y := v_1
19482		if !(!shiftIsBounded(v)) {
19483			break
19484		}
19485		v.reset(OpARM64CSEL)
19486		v.AuxInt = opToAuxInt(OpARM64LessThanU)
19487		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
19488		v0.AddArg2(x, y)
19489		v1 := b.NewValue0(v.Pos, OpConst64, t)
19490		v1.AuxInt = int64ToAuxInt(0)
19491		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
19492		v2.AuxInt = int64ToAuxInt(64)
19493		v2.AddArg(y)
19494		v.AddArg3(v0, v1, v2)
19495		return true
19496	}
19497	return false
19498}
19499func rewriteValueARM64_OpLsh32x8(v *Value) bool {
19500	v_1 := v.Args[1]
19501	v_0 := v.Args[0]
19502	b := v.Block
19503	typ := &b.Func.Config.Types
19504	// match: (Lsh32x8 <t> x y)
19505	// cond: shiftIsBounded(v)
19506	// result: (SLL <t> x y)
19507	for {
19508		t := v.Type
19509		x := v_0
19510		y := v_1
19511		if !(shiftIsBounded(v)) {
19512			break
19513		}
19514		v.reset(OpARM64SLL)
19515		v.Type = t
19516		v.AddArg2(x, y)
19517		return true
19518	}
19519	// match: (Lsh32x8 <t> x y)
19520	// cond: !shiftIsBounded(v)
19521	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
19522	for {
19523		t := v.Type
19524		x := v_0
19525		y := v_1
19526		if !(!shiftIsBounded(v)) {
19527			break
19528		}
19529		v.reset(OpARM64CSEL)
19530		v.AuxInt = opToAuxInt(OpARM64LessThanU)
19531		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
19532		v0.AddArg2(x, y)
19533		v1 := b.NewValue0(v.Pos, OpConst64, t)
19534		v1.AuxInt = int64ToAuxInt(0)
19535		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
19536		v2.AuxInt = int64ToAuxInt(64)
19537		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
19538		v3.AddArg(y)
19539		v2.AddArg(v3)
19540		v.AddArg3(v0, v1, v2)
19541		return true
19542	}
19543	return false
19544}
19545func rewriteValueARM64_OpLsh64x16(v *Value) bool {
19546	v_1 := v.Args[1]
19547	v_0 := v.Args[0]
19548	b := v.Block
19549	typ := &b.Func.Config.Types
19550	// match: (Lsh64x16 <t> x y)
19551	// cond: shiftIsBounded(v)
19552	// result: (SLL <t> x y)
19553	for {
19554		t := v.Type
19555		x := v_0
19556		y := v_1
19557		if !(shiftIsBounded(v)) {
19558			break
19559		}
19560		v.reset(OpARM64SLL)
19561		v.Type = t
19562		v.AddArg2(x, y)
19563		return true
19564	}
19565	// match: (Lsh64x16 <t> x y)
19566	// cond: !shiftIsBounded(v)
19567	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
19568	for {
19569		t := v.Type
19570		x := v_0
19571		y := v_1
19572		if !(!shiftIsBounded(v)) {
19573			break
19574		}
19575		v.reset(OpARM64CSEL)
19576		v.AuxInt = opToAuxInt(OpARM64LessThanU)
19577		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
19578		v0.AddArg2(x, y)
19579		v1 := b.NewValue0(v.Pos, OpConst64, t)
19580		v1.AuxInt = int64ToAuxInt(0)
19581		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
19582		v2.AuxInt = int64ToAuxInt(64)
19583		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
19584		v3.AddArg(y)
19585		v2.AddArg(v3)
19586		v.AddArg3(v0, v1, v2)
19587		return true
19588	}
19589	return false
19590}
19591func rewriteValueARM64_OpLsh64x32(v *Value) bool {
19592	v_1 := v.Args[1]
19593	v_0 := v.Args[0]
19594	b := v.Block
19595	typ := &b.Func.Config.Types
19596	// match: (Lsh64x32 <t> x y)
19597	// cond: shiftIsBounded(v)
19598	// result: (SLL <t> x y)
19599	for {
19600		t := v.Type
19601		x := v_0
19602		y := v_1
19603		if !(shiftIsBounded(v)) {
19604			break
19605		}
19606		v.reset(OpARM64SLL)
19607		v.Type = t
19608		v.AddArg2(x, y)
19609		return true
19610	}
19611	// match: (Lsh64x32 <t> x y)
19612	// cond: !shiftIsBounded(v)
19613	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
19614	for {
19615		t := v.Type
19616		x := v_0
19617		y := v_1
19618		if !(!shiftIsBounded(v)) {
19619			break
19620		}
19621		v.reset(OpARM64CSEL)
19622		v.AuxInt = opToAuxInt(OpARM64LessThanU)
19623		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
19624		v0.AddArg2(x, y)
19625		v1 := b.NewValue0(v.Pos, OpConst64, t)
19626		v1.AuxInt = int64ToAuxInt(0)
19627		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
19628		v2.AuxInt = int64ToAuxInt(64)
19629		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
19630		v3.AddArg(y)
19631		v2.AddArg(v3)
19632		v.AddArg3(v0, v1, v2)
19633		return true
19634	}
19635	return false
19636}
19637func rewriteValueARM64_OpLsh64x64(v *Value) bool {
19638	v_1 := v.Args[1]
19639	v_0 := v.Args[0]
19640	b := v.Block
19641	// match: (Lsh64x64 <t> x y)
19642	// cond: shiftIsBounded(v)
19643	// result: (SLL <t> x y)
19644	for {
19645		t := v.Type
19646		x := v_0
19647		y := v_1
19648		if !(shiftIsBounded(v)) {
19649			break
19650		}
19651		v.reset(OpARM64SLL)
19652		v.Type = t
19653		v.AddArg2(x, y)
19654		return true
19655	}
19656	// match: (Lsh64x64 <t> x y)
19657	// cond: !shiftIsBounded(v)
19658	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
19659	for {
19660		t := v.Type
19661		x := v_0
19662		y := v_1
19663		if !(!shiftIsBounded(v)) {
19664			break
19665		}
19666		v.reset(OpARM64CSEL)
19667		v.AuxInt = opToAuxInt(OpARM64LessThanU)
19668		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
19669		v0.AddArg2(x, y)
19670		v1 := b.NewValue0(v.Pos, OpConst64, t)
19671		v1.AuxInt = int64ToAuxInt(0)
19672		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
19673		v2.AuxInt = int64ToAuxInt(64)
19674		v2.AddArg(y)
19675		v.AddArg3(v0, v1, v2)
19676		return true
19677	}
19678	return false
19679}
19680func rewriteValueARM64_OpLsh64x8(v *Value) bool {
19681	v_1 := v.Args[1]
19682	v_0 := v.Args[0]
19683	b := v.Block
19684	typ := &b.Func.Config.Types
19685	// match: (Lsh64x8 <t> x y)
19686	// cond: shiftIsBounded(v)
19687	// result: (SLL <t> x y)
19688	for {
19689		t := v.Type
19690		x := v_0
19691		y := v_1
19692		if !(shiftIsBounded(v)) {
19693			break
19694		}
19695		v.reset(OpARM64SLL)
19696		v.Type = t
19697		v.AddArg2(x, y)
19698		return true
19699	}
19700	// match: (Lsh64x8 <t> x y)
19701	// cond: !shiftIsBounded(v)
19702	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
19703	for {
19704		t := v.Type
19705		x := v_0
19706		y := v_1
19707		if !(!shiftIsBounded(v)) {
19708			break
19709		}
19710		v.reset(OpARM64CSEL)
19711		v.AuxInt = opToAuxInt(OpARM64LessThanU)
19712		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
19713		v0.AddArg2(x, y)
19714		v1 := b.NewValue0(v.Pos, OpConst64, t)
19715		v1.AuxInt = int64ToAuxInt(0)
19716		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
19717		v2.AuxInt = int64ToAuxInt(64)
19718		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
19719		v3.AddArg(y)
19720		v2.AddArg(v3)
19721		v.AddArg3(v0, v1, v2)
19722		return true
19723	}
19724	return false
19725}
// rewriteValueARM64_OpLsh8x16 lowers the generic Lsh8x16 op to ARM64.
// A shift proven in-bounds becomes a plain SLL; otherwise a CSEL selects
// the SLL result while the zero-extended shift amount is < 64, and the
// constant 0 once it is >= 64.
func rewriteValueARM64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh8x32 lowers the generic Lsh8x32 op to ARM64.
// A shift proven in-bounds becomes a plain SLL; otherwise a CSEL selects
// the SLL result while the zero-extended shift amount is < 64, and the
// constant 0 once it is >= 64.
func rewriteValueARM64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh8x64 lowers the generic Lsh8x64 op to ARM64.
// A shift proven in-bounds becomes a plain SLL; otherwise a CSEL selects
// the SLL result while the (already 64-bit, so no extension needed) shift
// amount is < 64, and the constant 0 once it is >= 64.
func rewriteValueARM64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh8x64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpLsh8x8 lowers the generic Lsh8x8 op to ARM64.
// A shift proven in-bounds becomes a plain SLL; otherwise a CSEL selects
// the SLL result while the zero-extended shift amount is < 64, and the
// constant 0 once it is >= 64.
func rewriteValueARM64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SLL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SLL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpMod16 lowers the generic signed 16-bit modulus to a
// 32-bit MODW whose operands are sign-extended from 16 to 32 bits.
func rewriteValueARM64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y)
	// result: (MODW (SignExt16to32 x) (SignExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64MODW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpMod16u lowers the generic unsigned 16-bit modulus to
// a 32-bit UMODW whose operands are zero-extended from 16 to 32 bits.
func rewriteValueARM64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (UMODW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64UMODW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpMod32 lowers the generic signed 32-bit modulus
// directly to the ARM64 MODW op; no operand extension is needed.
func rewriteValueARM64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod32 x y)
	// result: (MODW x y)
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64MODW)
		v.AddArg2(x, y)
		return true
	}
}
// rewriteValueARM64_OpMod64 lowers the generic signed 64-bit modulus
// directly to the ARM64 MOD op.
func rewriteValueARM64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod64 x y)
	// result: (MOD x y)
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64MOD)
		v.AddArg2(x, y)
		return true
	}
}
// rewriteValueARM64_OpMod8 lowers the generic signed 8-bit modulus to a
// 32-bit MODW whose operands are sign-extended from 8 to 32 bits.
func rewriteValueARM64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (MODW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64MODW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpMod8u lowers the generic unsigned 8-bit modulus to a
// 32-bit UMODW whose operands are zero-extended from 8 to 32 bits.
func rewriteValueARM64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (UMODW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64UMODW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueARM64_OpMove lowers the generic Move op (copy v.AuxInt bytes
// from src to dst, threading mem) into ARM64 load/store sequences:
//   - size 0 is the memory state itself; sizes 1,2,4,8 are a single
//     load/store pair of the matching width;
//   - sizes 3,5,6,9,10,12 combine a power-of-two store with a smaller
//     store at the tail offset; sizes 7,11,13,14,15 instead use two
//     overlapping stores of the next power of two;
//   - sizes 16/32/48/64 use LDP/STP 16-byte pairs;
//   - other sizes > 16 peel the non-multiple-of-16 tail with a recursive
//     Move (an 8- or 16-byte overlapping copy at the end), then the
//     remaining multiple of 16 goes to DUFFCOPY (<= 16*64 bytes, Duff's
//     device enabled) or to the LoweredMove loop otherwise.
func rewriteValueARM64_OpMove(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Move [0] _ _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.copyOf(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// result: (MOVBstore dst (MOVBUload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// result: (MOVHstore dst (MOVHUload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [3] dst src mem)
	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] dst src mem)
	// result: (MOVWstore dst (MOVWUload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [5] dst src mem)
	// result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 5 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [6] dst src mem)
	// result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [7] dst src mem)
	// result: (MOVWstore [3] dst (MOVWUload [3] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 7 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(3)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] dst src mem)
	// result: (MOVDstore dst (MOVDload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVDstore)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [9] dst src mem)
	// result: (MOVBstore [8] dst (MOVBUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 9 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVBstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [10] dst src mem)
	// result: (MOVHstore [8] dst (MOVHUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 10 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVHstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [11] dst src mem)
	// result: (MOVDstore [3] dst (MOVDload [3] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 11 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(3)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [12] dst src mem)
	// result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [13] dst src mem)
	// result: (MOVDstore [5] dst (MOVDload [5] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 13 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(5)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(5)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [14] dst src mem)
	// result: (MOVDstore [6] dst (MOVDload [6] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 14 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(6)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [15] dst src mem)
	// result: (MOVDstore [7] dst (MOVDload [7] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 15 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64MOVDstore)
		v.AuxInt = int32ToAuxInt(7)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(7)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [16] dst src mem)
	// result: (STP dst (Select0 <typ.UInt64> (LDP src mem)) (Select1 <typ.UInt64> (LDP src mem)) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64STP)
		v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v1.AddArg2(src, mem)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v2.AddArg(v1)
		v.AddArg4(dst, v0, v2, mem)
		return true
	}
	// match: (Move [32] dst src mem)
	// result: (STP [16] dst (Select0 <typ.UInt64> (LDP [16] src mem)) (Select1 <typ.UInt64> (LDP [16] src mem)) (STP dst (Select0 <typ.UInt64> (LDP src mem)) (Select1 <typ.UInt64> (LDP src mem)) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 32 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64STP)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v1.AuxInt = int32ToAuxInt(16)
		v1.AddArg2(src, mem)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v2.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v5 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v5.AddArg2(src, mem)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v6.AddArg(v5)
		v3.AddArg4(dst, v4, v6, mem)
		v.AddArg4(dst, v0, v2, v3)
		return true
	}
	// match: (Move [48] dst src mem)
	// result: (STP [32] dst (Select0 <typ.UInt64> (LDP [32] src mem)) (Select1 <typ.UInt64> (LDP [32] src mem)) (STP [16] dst (Select0 <typ.UInt64> (LDP [16] src mem)) (Select1 <typ.UInt64> (LDP [16] src mem)) (STP dst (Select0 <typ.UInt64> (LDP src mem)) (Select1 <typ.UInt64> (LDP src mem)) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 48 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64STP)
		v.AuxInt = int32ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v1.AuxInt = int32ToAuxInt(32)
		v1.AddArg2(src, mem)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v2.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(16)
		v4 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v5 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v5.AuxInt = int32ToAuxInt(16)
		v5.AddArg2(src, mem)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v6.AddArg(v5)
		v7 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v8 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v9 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v9.AddArg2(src, mem)
		v8.AddArg(v9)
		v10 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v10.AddArg(v9)
		v7.AddArg4(dst, v8, v10, mem)
		v3.AddArg4(dst, v4, v6, v7)
		v.AddArg4(dst, v0, v2, v3)
		return true
	}
	// match: (Move [64] dst src mem)
	// result: (STP [48] dst (Select0 <typ.UInt64> (LDP [48] src mem)) (Select1 <typ.UInt64> (LDP [48] src mem)) (STP [32] dst (Select0 <typ.UInt64> (LDP [32] src mem)) (Select1 <typ.UInt64> (LDP [32] src mem)) (STP [16] dst (Select0 <typ.UInt64> (LDP [16] src mem)) (Select1 <typ.UInt64> (LDP [16] src mem)) (STP dst (Select0 <typ.UInt64> (LDP src mem)) (Select1 <typ.UInt64> (LDP src mem)) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 64 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpARM64STP)
		v.AuxInt = int32ToAuxInt(48)
		v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v1.AuxInt = int32ToAuxInt(48)
		v1.AddArg2(src, mem)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v2.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(32)
		v4 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v5 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v5.AuxInt = int32ToAuxInt(32)
		v5.AddArg2(src, mem)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v6.AddArg(v5)
		v7 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v7.AuxInt = int32ToAuxInt(16)
		v8 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v9 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v9.AuxInt = int32ToAuxInt(16)
		v9.AddArg2(src, mem)
		v8.AddArg(v9)
		v10 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v10.AddArg(v9)
		v11 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v12 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v13 := b.NewValue0(v.Pos, OpARM64LDP, types.NewTuple(typ.UInt64, typ.UInt64))
		v13.AddArg2(src, mem)
		v12.AddArg(v13)
		v14 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v14.AddArg(v13)
		v11.AddArg4(dst, v12, v14, mem)
		v7.AddArg4(dst, v8, v10, v11)
		v3.AddArg4(dst, v4, v6, v7)
		v.AddArg4(dst, v0, v2, v3)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s%16 != 0 && s%16 <= 8 && s > 16
	// result: (Move [8] (OffPtr <dst.Type> dst [s-8]) (OffPtr <src.Type> src [s-8]) (Move [s-s%16] dst src mem))
	for {
		s := auxIntToInt64(v.AuxInt)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s%16 != 0 && s%16 <= 8 && s > 16) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
		v0.AuxInt = int64ToAuxInt(s - 8)
		v0.AddArg(dst)
		v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
		v1.AuxInt = int64ToAuxInt(s - 8)
		v1.AddArg(src)
		v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem)
		v2.AuxInt = int64ToAuxInt(s - s%16)
		v2.AddArg3(dst, src, mem)
		v.AddArg3(v0, v1, v2)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s%16 != 0 && s%16 > 8 && s > 16
	// result: (Move [16] (OffPtr <dst.Type> dst [s-16]) (OffPtr <src.Type> src [s-16]) (Move [s-s%16] dst src mem))
	for {
		s := auxIntToInt64(v.AuxInt)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s%16 != 0 && s%16 > 8 && s > 16) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
		v0.AuxInt = int64ToAuxInt(s - 16)
		v0.AddArg(dst)
		v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
		v1.AuxInt = int64ToAuxInt(s - 16)
		v1.AddArg(src)
		v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem)
		v2.AuxInt = int64ToAuxInt(s - s%16)
		v2.AddArg3(dst, src, mem)
		v.AddArg3(v0, v1, v2)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s > 64 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice && logLargeCopy(v, s)
	// result: (DUFFCOPY [8 * (64 - s/16)] dst src mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s > 64 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpARM64DUFFCOPY)
		v.AuxInt = int64ToAuxInt(8 * (64 - s/16))
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s%16 == 0 && (s > 16*64 || config.noDuffDevice) && logLargeCopy(v, s)
	// result: (LoweredMove dst src (ADDconst <src.Type> src [s-16]) mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s%16 == 0 && (s > 16*64 || config.noDuffDevice) && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpARM64LoweredMove)
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, src.Type)
		v0.AuxInt = int64ToAuxInt(s - 16)
		v0.AddArg(src)
		v.AddArg4(dst, src, v0, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpNeq16 lowers Neq16 to NotEqual over a 32-bit CMPW of
// the zero-extended operands.
func rewriteValueARM64_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeq32 lowers Neq32 to NotEqual over a 32-bit CMPW.
func rewriteValueARM64_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq32 x y)
	// result: (NotEqual (CMPW x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeq32F lowers Neq32F to NotEqual over a single-
// precision floating-point compare (FCMPS).
func rewriteValueARM64_OpNeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq32F x y)
	// result: (NotEqual (FCMPS x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeq64 lowers Neq64 to NotEqual over a 64-bit CMP.
func rewriteValueARM64_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq64 x y)
	// result: (NotEqual (CMP x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeq64F lowers Neq64F to NotEqual over a double-
// precision floating-point compare (FCMPD).
func rewriteValueARM64_OpNeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq64F x y)
	// result: (NotEqual (FCMPD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeq8 lowers Neq8 to NotEqual over a 32-bit CMPW of
// the zero-extended operands.
func rewriteValueARM64_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNeqPtr lowers pointer inequality to NotEqual over a
// 64-bit CMP of the two pointers.
func rewriteValueARM64_OpNeqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (NeqPtr x y)
	// result: (NotEqual (CMP x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpNot lowers boolean Not to XOR with the constant 1,
// which flips a 0/1 boolean value.
func rewriteValueARM64_OpNot(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Not x)
	// result: (XOR (MOVDconst [1]) x)
	for {
		x := v_0
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueARM64_OpOffPtr lowers OffPtr: an offset from SP that fits in
// 32 bits becomes a MOVDaddr address computation; any other offset is a
// plain ADDconst on the pointer.
func rewriteValueARM64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (OffPtr [off] ptr:(SP))
	// cond: is32Bit(off)
	// result: (MOVDaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP || !(is32Bit(off)) {
			break
		}
		v.reset(OpARM64MOVDaddr)
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADDconst [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
}
// rewriteValueARM64_OpPanicBounds lowers PanicBounds to one of the three
// ABI-specific bounds-panic ops (A, B or C), chosen by boundsABI(kind).
func rewriteValueARM64_OpPanicBounds(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 0
	// result: (LoweredPanicBoundsA [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 0) {
			break
		}
		v.reset(OpARM64LoweredPanicBoundsA)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 1
	// result: (LoweredPanicBoundsB [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 1) {
			break
		}
		v.reset(OpARM64LoweredPanicBoundsB)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 2
	// result: (LoweredPanicBoundsC [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 2) {
			break
		}
		v.reset(OpARM64LoweredPanicBoundsC)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpPopCount16 lowers PopCount16 using the vector unit:
// the zero-extended input is moved to an FP/SIMD register (FMOVDgpfp),
// VCNT counts bits per byte, VUADDLV sums the byte counts, and the result
// is moved back to a general-purpose register (FMOVDfpgp).
func rewriteValueARM64_OpPopCount16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount16 <t> x)
	// result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt16to64 x)))))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64FMOVDfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
		v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
		v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(x)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpPopCount32 lowers PopCount32 using the vector unit:
// the zero-extended input is moved to an FP/SIMD register (FMOVDgpfp),
// VCNT counts bits per byte, VUADDLV sums the byte counts, and the result
// is moved back to a general-purpose register (FMOVDfpgp).
func rewriteValueARM64_OpPopCount32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount32 <t> x)
	// result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt32to64 x)))))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64FMOVDfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
		v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
		v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(x)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpPopCount64 lowers PopCount64 via NEON. The full 64-bit
// value is moved to the FP/SIMD side (no extension needed), VCNT counts set
// bits per byte, and VUADDLV reduces the eight byte counts to one scalar.
// Always rewrites (returns true).
func rewriteValueARM64_OpPopCount64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount64 <t> x)
	// result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> x))))
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64FMOVDfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
		v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
		v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpPrefetchCache lowers the generic PrefetchCache op to
// the ARM64 PRFM instruction with aux 0 (the "keep in cache" variant, as
// opposed to aux 1 used by PrefetchCacheStreamed below). Always rewrites.
func rewriteValueARM64_OpPrefetchCache(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PrefetchCache addr mem)
	// result: (PRFM [0] addr mem)
	for {
		addr := v_0
		mem := v_1
		v.reset(OpARM64PRFM)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg2(addr, mem)
		return true
	}
}
// rewriteValueARM64_OpPrefetchCacheStreamed lowers PrefetchCacheStreamed to
// PRFM with aux 1, distinguishing the streaming (non-temporal) prefetch from
// the plain PrefetchCache lowering (aux 0). Always rewrites.
func rewriteValueARM64_OpPrefetchCacheStreamed(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PrefetchCacheStreamed addr mem)
	// result: (PRFM [1] addr mem)
	for {
		addr := v_0
		mem := v_1
		v.reset(OpARM64PRFM)
		v.AuxInt = int64ToAuxInt(1)
		v.AddArg2(addr, mem)
		return true
	}
}
// rewriteValueARM64_OpPubBarrier lowers the generic publication-barrier op
// to a DMB instruction. The aux value 0xe selects the barrier option
// (presumably DMB ST in the inner-shareable domain — confirm against the
// ARM64 DMB option encoding / the assembler's operand table). Always rewrites.
func rewriteValueARM64_OpPubBarrier(v *Value) bool {
	v_0 := v.Args[0]
	// match: (PubBarrier mem)
	// result: (DMB [0xe] mem)
	for {
		mem := v_0
		v.reset(OpARM64DMB)
		v.AuxInt = int64ToAuxInt(0xe)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueARM64_OpRotateLeft16 lowers RotateLeft16.
//
// Constant count: decompose into (x << (c&15)) | (x >> (-c&15)); the masks
// keep both shift amounts in [0,15] so the generic shift rules stay safe.
//
// Variable count: duplicate the 16-bit value into the two halves of a 32-bit
// word ((zext16 x) | (zext16 x) << 16) and rotate that RIGHT by -y with RORW
// — a right-rotate by -y of the doubled value equals a left-rotate by y of
// the original 16 bits. Always rewrites (one of the two rules fires).
func rewriteValueARM64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x (MOVDconst [c]))
	// result: (Or16 (Lsh16x64 <t> x (MOVDconst [c&15])) (Rsh16Ux64 <t> x (MOVDconst [-c&15])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr16)
		v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 15)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 15)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (RotateLeft16 <t> x y)
	// result: (RORW <t> (ORshiftLL <typ.UInt32> (ZeroExt16to32 x) (ZeroExt16to32 x) [16]) (NEG <typ.Int64> y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpARM64RORW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64ORshiftLL, typ.UInt32)
		v0.AuxInt = int64ToAuxInt(16)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		// The same zero-extended value feeds both OR operands (x duplicated
		// into low and high halves).
		v0.AddArg2(v1, v1)
		v2 := b.NewValue0(v.Pos, OpARM64NEG, typ.Int64)
		v2.AddArg(y)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueARM64_OpRotateLeft32 lowers RotateLeft32 to RORW with a negated
// count: ARM64 only has rotate-right, and rotating right by -y (mod 32) is
// the same as rotating left by y. Always rewrites (returns true).
func rewriteValueARM64_OpRotateLeft32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (RotateLeft32 x y)
	// result: (RORW x (NEG <y.Type> y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64RORW)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueARM64_OpRotateLeft64 lowers RotateLeft64 to ROR with a negated
// count (rotate-left by y == rotate-right by -y, mod 64), mirroring the
// 32-bit lowering above. Always rewrites (returns true).
func rewriteValueARM64_OpRotateLeft64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (RotateLeft64 x y)
	// result: (ROR x (NEG <y.Type> y))
	for {
		x := v_0
		y := v_1
		v.reset(OpARM64ROR)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueARM64_OpRotateLeft8 lowers RotateLeft8.
//
// Constant count: decompose into (x << (c&7)) | (x >> (-c&7)), masking both
// amounts into [0,7].
//
// Variable count: there is no 8-bit rotate, so build
// (x << (y&7)) | (zext8(x) >> ((-y)&7)) directly with SLL/SRL; the ANDconst
// masks bound both shift amounts, and the zero-extension makes the right
// shift logical over the 8 payload bits. Always rewrites.
func rewriteValueARM64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x (MOVDconst [c]))
	// result: (Or8 (Lsh8x64 <t> x (MOVDconst [c&7])) (Rsh8Ux64 <t> x (MOVDconst [-c&7])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr8)
		v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 7)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 7)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (RotateLeft8 <t> x y)
	// result: (OR <t> (SLL <t> x (ANDconst <typ.Int64> [7] y)) (SRL <t> (ZeroExt8to64 x) (ANDconst <typ.Int64> [7] (NEG <typ.Int64> y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpARM64OR)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v1 := b.NewValue0(v.Pos, OpARM64ANDconst, typ.Int64)
		v1.AuxInt = int64ToAuxInt(7)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpARM64ANDconst, typ.Int64)
		v4.AuxInt = int64ToAuxInt(7)
		v5 := b.NewValue0(v.Pos, OpARM64NEG, typ.Int64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueARM64_OpRsh16Ux16 lowers Rsh16Ux16 (unsigned >> of a 16-bit
// value by a 16-bit count). When the compiler has proven the count in range
// (shiftIsBounded), emit a bare SRL on the zero-extended operand. Otherwise
// wrap it in a CSEL that yields 0 whenever the zero-extended count is >= 64,
// matching Go's shift semantics. Returns false only if neither rule fires
// (both conditions are complementary, so in practice one always does).
func rewriteValueARM64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt16to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh16Ux32 lowers Rsh16Ux32: same scheme as Rsh16Ux16
// (bare SRL when bounded, CSEL-to-zero on count >= 64 otherwise), except the
// shift count is zero-extended from 32 bits for the CMPconst check.
func rewriteValueARM64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt16to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh16Ux64 lowers Rsh16Ux64: as Rsh16Ux16/32, but the
// count y is already 64-bit, so it feeds CMPconst [64] directly without an
// extension.
func rewriteValueARM64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt16to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh16Ux8 lowers Rsh16Ux8: same scheme as the other
// Rsh16Ux* lowerings, with the 8-bit count zero-extended for the bounds
// comparison against 64.
func rewriteValueARM64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt16to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh16x16 lowers Rsh16x16 (arithmetic >> of a 16-bit
// value by a 16-bit count). Bounded counts become a bare SRA on the
// sign-extended operand. Unbounded counts are clamped: a CSEL substitutes 63
// for the count when the zero-extended count is >= 64, so over-shifts still
// produce the all-sign-bits result Go specifies (unlike the unsigned
// lowerings, which select a constant 0 instead).
func rewriteValueARM64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x16 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh16x32 lowers Rsh16x32: same clamp-to-63 scheme as
// Rsh16x16, with the count zero-extended from 32 bits for the >= 64 check.
func rewriteValueARM64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x32 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh16x64 lowers Rsh16x64: the 64-bit count feeds the
// CMPconst [64] check directly; otherwise identical to the other Rsh16x*
// lowerings (bounded -> bare SRA, unbounded -> count clamped to 63 via CSEL).
func rewriteValueARM64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x64 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh16x8 lowers Rsh16x8: same clamp-to-63 scheme as the
// other Rsh16x* lowerings, with the 8-bit count zero-extended for the >= 64
// check.
func rewriteValueARM64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x8 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh32Ux16 lowers Rsh32Ux16: the 32-bit operand is
// zero-extended and shifted with SRL; when the shift is not provably bounded,
// a CSEL selects 0 for (zero-extended 16-bit) counts >= 64.
func rewriteValueARM64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt32to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh32Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh32Ux32 lowers Rsh32Ux32: as Rsh32Ux16, but the
// count is zero-extended from 32 bits for the >= 64 bounds check.
func rewriteValueARM64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt32to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh32Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh32Ux64 lowers Rsh32Ux64: the 64-bit count feeds the
// CMPconst [64] check directly; otherwise the same bounded/unbounded split
// as the other Rsh32Ux* lowerings.
func rewriteValueARM64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt32to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh32Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh32Ux8 lowers Rsh32Ux8: same scheme as the other
// Rsh32Ux* lowerings, with the 8-bit count zero-extended for the bounds
// comparison against 64.
func rewriteValueARM64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt32to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh32Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh32x16 lowers Rsh32x16 (arithmetic >>): bounded
// counts become a bare SRA on the sign-extended operand; unbounded counts
// are clamped to 63 via CSEL when the zero-extended count is >= 64, so
// over-shifts yield the sign-fill result.
func rewriteValueARM64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt32to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh32x16 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt32to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh32x32 lowers Rsh32x32: same clamp-to-63 scheme as
// Rsh32x16, with the count zero-extended from 32 bits for the >= 64 check.
func rewriteValueARM64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt32to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh32x32 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt32to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh32x64 lowers Rsh32x64: the 64-bit count feeds the
// CMPconst [64] check directly; otherwise identical to the other Rsh32x*
// lowerings (bounded -> bare SRA, unbounded -> count clamped to 63 via CSEL).
func rewriteValueARM64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt32to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh32x64 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt32to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh32x8 lowers Rsh32x8: same clamp-to-63 scheme as the
// other Rsh32x* lowerings, with the 8-bit count zero-extended for the >= 64
// check.
func rewriteValueARM64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt32to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh32x8 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt32to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh64Ux16 lowers Rsh64Ux16: the operand is already
// 64-bit so no extension of x is needed; bounded counts become a bare SRL,
// otherwise a CSEL selects 0 when the zero-extended 16-bit count is >= 64.
func rewriteValueARM64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh64Ux32 lowers Rsh64Ux32: as Rsh64Ux16, with the
// count zero-extended from 32 bits for the >= 64 bounds check.
func rewriteValueARM64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh64Ux64 lowers the generic Rsh64Ux64 op (unsigned
// 64-bit right shift by a 64-bit count) to ARM64 ops, per the
// match/cond/result rules below. The count is already 64-bit, so the
// unbounded case compares y directly against 64 (no extension needed).
// Reports whether v was rewritten.
func rewriteValueARM64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64Ux64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh64Ux8 lowers the generic Rsh64Ux8 op (unsigned
// 64-bit right shift by an 8-bit count) to ARM64 ops, per the
// match/cond/result rules below. When the shift count is not provably
// bounded, a CSEL yields 0 for counts >= 64. Reports whether v was rewritten.
func rewriteValueARM64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = int64ToAuxInt(0)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg3(v0, v1, v2)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh64x16 lowers the generic Rsh64x16 op (signed
// 64-bit right shift by a 16-bit count) to ARM64 ops, per the
// match/cond/result rules below. When the shift count is not provably
// bounded, a CSEL clamps the count to 63 so out-of-range shifts still
// replicate the sign bit. Reports whether v was rewritten.
func rewriteValueARM64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x16 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v0.AuxInt = opToAuxInt(OpARM64LessThanU)
		v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v1.AuxInt = int64ToAuxInt(63)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v0.AddArg3(y, v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh64x32 lowers the generic Rsh64x32 op (signed
// 64-bit right shift by a 32-bit count) to ARM64 ops, per the
// match/cond/result rules below. When the shift count is not provably
// bounded, a CSEL clamps the count to 63. Reports whether v was rewritten.
func rewriteValueARM64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x32 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v0.AuxInt = opToAuxInt(OpARM64LessThanU)
		v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v1.AuxInt = int64ToAuxInt(63)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v0.AddArg3(y, v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh64x64 lowers the generic Rsh64x64 op (signed
// 64-bit right shift by a 64-bit count) to ARM64 ops, per the
// match/cond/result rules below. The count is already 64-bit, so the
// unbounded case compares y directly against 64 and clamps to 63.
// Reports whether v was rewritten.
func rewriteValueARM64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64x64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x64 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v0.AuxInt = opToAuxInt(OpARM64LessThanU)
		v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v1.AuxInt = int64ToAuxInt(63)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v0.AddArg3(y, v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh64x8 lowers the generic Rsh64x8 op (signed
// 64-bit right shift by an 8-bit count) to ARM64 ops, per the
// match/cond/result rules below. When the shift count is not provably
// bounded, a CSEL clamps the count to 63. Reports whether v was rewritten.
func rewriteValueARM64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> x y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x8 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v0.AuxInt = opToAuxInt(OpARM64LessThanU)
		v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v1.AuxInt = int64ToAuxInt(63)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v0.AddArg3(y, v1, v2)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh8Ux16 lowers the generic Rsh8Ux16 op (unsigned
// 8-bit right shift by a 16-bit count) to ARM64 ops, per the
// match/cond/result rules below. The 8-bit operand is zero-extended to 64
// bits before shifting; in the unbounded case a CSEL yields 0 for counts
// >= 64. Reports whether v was rewritten.
func rewriteValueARM64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh8Ux32 lowers the generic Rsh8Ux32 op (unsigned
// 8-bit right shift by a 32-bit count) to ARM64 ops, per the
// match/cond/result rules below. The 8-bit operand is zero-extended to 64
// bits before shifting; in the unbounded case a CSEL yields 0 for counts
// >= 64. Reports whether v was rewritten.
func rewriteValueARM64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh8Ux64 lowers the generic Rsh8Ux64 op (unsigned
// 8-bit right shift by a 64-bit count) to ARM64 ops, per the
// match/cond/result rules below. The 8-bit operand is zero-extended to 64
// bits before shifting; the 64-bit count is compared directly against 64.
// Reports whether v was rewritten.
func rewriteValueARM64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh8Ux8 lowers the generic Rsh8Ux8 op (unsigned
// 8-bit right shift by an 8-bit count) to ARM64 ops, per the
// match/cond/result rules below. Both the operand and the count are
// zero-extended to 64 bits as needed; in the unbounded case a CSEL yields
// 0 for counts >= 64. Reports whether v was rewritten.
func rewriteValueARM64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRL <t> (ZeroExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRL)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (CSEL [OpARM64LessThanU] (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64CSEL)
		v.AuxInt = opToAuxInt(OpARM64LessThanU)
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg3(v0, v2, v3)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh8x16 lowers the generic Rsh8x16 op (signed 8-bit
// right shift by a 16-bit count) to ARM64 ops, per the match/cond/result
// rules below. The 8-bit operand is sign-extended to 64 bits before
// shifting; in the unbounded case a CSEL clamps the count to 63.
// Reports whether v was rewritten.
func rewriteValueARM64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x16 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh8x32 lowers the generic Rsh8x32 op (signed 8-bit
// right shift by a 32-bit count) to ARM64 ops, per the match/cond/result
// rules below. The 8-bit operand is sign-extended to 64 bits before
// shifting; in the unbounded case a CSEL clamps the count to 63.
// Reports whether v was rewritten.
func rewriteValueARM64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x32 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh8x64 lowers the generic Rsh8x64 op (signed 8-bit
// right shift by a 64-bit count) to ARM64 ops, per the match/cond/result
// rules below. The 8-bit operand is sign-extended to 64 bits before
// shifting; the 64-bit count is compared directly against 64 and clamped
// to 63 when out of range. Reports whether v was rewritten.
func rewriteValueARM64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x64 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpRsh8x8 lowers the generic Rsh8x8 op (signed 8-bit
// right shift by an 8-bit count) to ARM64 ops, per the match/cond/result
// rules below. The operand is sign-extended and the count zero-extended
// to 64 bits as needed; in the unbounded case a CSEL clamps the count to
// 63. Reports whether v was rewritten.
func rewriteValueARM64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 <t> x y)
	// cond: shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x8 x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) (CSEL [OpARM64LessThanU] <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
	for {
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.AuxInt = opToAuxInt(OpARM64LessThanU)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = int64ToAuxInt(63)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v1.AddArg3(y, v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueARM64_OpSelect0 lowers Select0 (first element of a tuple-
// producing op) for the tuple ops handled below: Mul64uhilo (high half via
// UMULH), Add64carry and Sub64borrow (sum/difference via the flag-setting
// ADCSflags/SBCSflags forms), and Mul64uover (low 64 bits of the product).
// Reports whether v was rewritten.
func rewriteValueARM64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select0 (Mul64uhilo x y))
	// result: (UMULH x y)
	for {
		if v_0.Op != OpMul64uhilo {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64UMULH)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select0 (Add64carry x y c))
	// result: (Select0 <typ.UInt64> (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] c))))
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpSelect0)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpARM64ADCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		v2 := b.NewValue0(v.Pos, OpARM64ADDSconstflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v2.AuxInt = int64ToAuxInt(-1)
		v2.AddArg(c)
		v1.AddArg(v2)
		v0.AddArg3(x, y, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Select0 (Sub64borrow x y bo))
	// result: (Select0 <typ.UInt64> (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags bo))))
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		bo := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpSelect0)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpARM64SBCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		v2 := b.NewValue0(v.Pos, OpARM64NEGSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v2.AddArg(bo)
		v1.AddArg(v2)
		v0.AddArg3(x, y, v1)
		v.AddArg(v0)
		return true
	}
	// match: (Select0 (Mul64uover x y))
	// result: (MUL x y)
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64MUL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpSelect1 lowers Select1 (second element of a tuple-
// producing op) for the tuple ops handled below: Mul64uhilo (low half via
// MUL), Add64carry/Sub64borrow (carry/borrow-out recovered from the flags
// of the ADCSflags/SBCSflags chain), and Mul64uover (overflow flag: the
// product overflowed iff the high half UMULH is nonzero).
// Reports whether v was rewritten.
func rewriteValueARM64_OpSelect1(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select1 (Mul64uhilo x y))
	// result: (MUL x y)
	for {
		if v_0.Op != OpMul64uhilo {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64MUL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select1 (Add64carry x y c))
	// result: (ADCzerocarry <typ.UInt64> (Select1 <types.TypeFlags> (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] c)))))
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpARM64ADCzerocarry)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpARM64ADCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v2 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		v3 := b.NewValue0(v.Pos, OpARM64ADDSconstflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v3.AuxInt = int64ToAuxInt(-1)
		v3.AddArg(c)
		v2.AddArg(v3)
		v1.AddArg3(x, y, v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (Select1 (Sub64borrow x y bo))
	// result: (NEG <typ.UInt64> (NGCzerocarry <typ.UInt64> (Select1 <types.TypeFlags> (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags bo))))))
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		bo := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpARM64NEG)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpARM64NGCzerocarry, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		v2 := b.NewValue0(v.Pos, OpARM64SBCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v3 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
		v4 := b.NewValue0(v.Pos, OpARM64NEGSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
		v4.AddArg(bo)
		v3.AddArg(v4)
		v2.AddArg3(x, y, v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (Select1 (Mul64uover x y))
	// result: (NotEqual (CMPconst (UMULH <typ.UInt64> x y) [0]))
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpARM64UMULH, typ.UInt64)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpSelectN recognizes a static call to
// runtime.memmove (in both argument-passing shapes: arguments spilled via
// a chain of MOVDstores, or passed directly in registers) and, when the
// size is a nonnegative constant and the call is inlinable per
// isInlinableMemmove, replaces it with a Move op. The Uses/clobber checks
// ensure the stores and the call have no other consumers before they are
// discarded. Reports whether v was rewritten.
func rewriteValueARM64_OpSelectN(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (SelectN [0] call:(CALLstatic {sym} s1:(MOVDstore _ (MOVDconst [sz]) s2:(MOVDstore _ src s3:(MOVDstore {t} _ dst mem)))))
	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(s1, s2, s3, call)
	// result: (Move [sz] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpARM64CALLstatic || len(call.Args) != 1 {
			break
		}
		sym := auxToCall(call.Aux)
		s1 := call.Args[0]
		if s1.Op != OpARM64MOVDstore {
			break
		}
		_ = s1.Args[2]
		s1_1 := s1.Args[1]
		if s1_1.Op != OpARM64MOVDconst {
			break
		}
		sz := auxIntToInt64(s1_1.AuxInt)
		s2 := s1.Args[2]
		if s2.Op != OpARM64MOVDstore {
			break
		}
		_ = s2.Args[2]
		src := s2.Args[1]
		s3 := s2.Args[2]
		if s3.Op != OpARM64MOVDstore {
			break
		}
		mem := s3.Args[2]
		dst := s3.Args[1]
		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(s1, s2, s3, call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(sz)
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (SelectN [0] call:(CALLstatic {sym} dst src (MOVDconst [sz]) mem))
	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)
	// result: (Move [sz] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpARM64CALLstatic || len(call.Args) != 4 {
			break
		}
		sym := auxToCall(call.Aux)
		mem := call.Args[3]
		dst := call.Args[0]
		src := call.Args[1]
		call_2 := call.Args[2]
		if call_2.Op != OpARM64MOVDconst {
			break
		}
		sz := auxIntToInt64(call_2.AuxInt)
		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(sz)
		v.AddArg3(dst, src, mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpSlicemask lowers Slicemask to (SRAconst (NEG x)
// [63]): negating x and arithmetic-shifting right by 63 broadcasts
// "x != 0" into a mask of all ones (x > 0) or all zeros (x == 0).
// This rule always matches, so the function always returns true.
func rewriteValueARM64_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Slicemask <t> x)
	// result: (SRAconst (NEG <t> x) [63])
	for {
		t := v.Type
		x := v_0
		v.reset(OpARM64SRAconst)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueARM64_OpStore lowers a generic Store to the ARM64 store of
// the right width, chosen by the stored type's size (1/2/4/8 bytes) and
// whether it is a float (FMOVSstore/FMOVDstore vs MOVWstore/MOVDstore).
// Reports whether v was rewritten.
func rewriteValueARM64_OpStore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 1) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 2) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && !t.IsFloat()
	// result: (MOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && !t.IsFloat()) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && !t.IsFloat()
	// result: (MOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && !t.IsFloat()) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && t.IsFloat()
	// result: (FMOVSstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && t.IsFloat()) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && t.IsFloat()
	// result: (FMOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && t.IsFloat()) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
22868func rewriteValueARM64_OpZero(v *Value) bool {
22869	v_1 := v.Args[1]
22870	v_0 := v.Args[0]
22871	b := v.Block
22872	config := b.Func.Config
22873	typ := &b.Func.Config.Types
22874	// match: (Zero [0] _ mem)
22875	// result: mem
22876	for {
22877		if auxIntToInt64(v.AuxInt) != 0 {
22878			break
22879		}
22880		mem := v_1
22881		v.copyOf(mem)
22882		return true
22883	}
22884	// match: (Zero [1] ptr mem)
22885	// result: (MOVBstore ptr (MOVDconst [0]) mem)
22886	for {
22887		if auxIntToInt64(v.AuxInt) != 1 {
22888			break
22889		}
22890		ptr := v_0
22891		mem := v_1
22892		v.reset(OpARM64MOVBstore)
22893		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
22894		v0.AuxInt = int64ToAuxInt(0)
22895		v.AddArg3(ptr, v0, mem)
22896		return true
22897	}
22898	// match: (Zero [2] ptr mem)
22899	// result: (MOVHstore ptr (MOVDconst [0]) mem)
22900	for {
22901		if auxIntToInt64(v.AuxInt) != 2 {
22902			break
22903		}
22904		ptr := v_0
22905		mem := v_1
22906		v.reset(OpARM64MOVHstore)
22907		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
22908		v0.AuxInt = int64ToAuxInt(0)
22909		v.AddArg3(ptr, v0, mem)
22910		return true
22911	}
22912	// match: (Zero [4] ptr mem)
22913	// result: (MOVWstore ptr (MOVDconst [0]) mem)
22914	for {
22915		if auxIntToInt64(v.AuxInt) != 4 {
22916			break
22917		}
22918		ptr := v_0
22919		mem := v_1
22920		v.reset(OpARM64MOVWstore)
22921		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
22922		v0.AuxInt = int64ToAuxInt(0)
22923		v.AddArg3(ptr, v0, mem)
22924		return true
22925	}
22926	// match: (Zero [3] ptr mem)
22927	// result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
22928	for {
22929		if auxIntToInt64(v.AuxInt) != 3 {
22930			break
22931		}
22932		ptr := v_0
22933		mem := v_1
22934		v.reset(OpARM64MOVBstore)
22935		v.AuxInt = int32ToAuxInt(2)
22936		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
22937		v0.AuxInt = int64ToAuxInt(0)
22938		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
22939		v1.AddArg3(ptr, v0, mem)
22940		v.AddArg3(ptr, v0, v1)
22941		return true
22942	}
22943	// match: (Zero [5] ptr mem)
22944	// result: (MOVBstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
22945	for {
22946		if auxIntToInt64(v.AuxInt) != 5 {
22947			break
22948		}
22949		ptr := v_0
22950		mem := v_1
22951		v.reset(OpARM64MOVBstore)
22952		v.AuxInt = int32ToAuxInt(4)
22953		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
22954		v0.AuxInt = int64ToAuxInt(0)
22955		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
22956		v1.AddArg3(ptr, v0, mem)
22957		v.AddArg3(ptr, v0, v1)
22958		return true
22959	}
22960	// match: (Zero [6] ptr mem)
22961	// result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
22962	for {
22963		if auxIntToInt64(v.AuxInt) != 6 {
22964			break
22965		}
22966		ptr := v_0
22967		mem := v_1
22968		v.reset(OpARM64MOVHstore)
22969		v.AuxInt = int32ToAuxInt(4)
22970		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
22971		v0.AuxInt = int64ToAuxInt(0)
22972		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
22973		v1.AddArg3(ptr, v0, mem)
22974		v.AddArg3(ptr, v0, v1)
22975		return true
22976	}
22977	// match: (Zero [7] ptr mem)
22978	// result: (MOVWstore [3] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
22979	for {
22980		if auxIntToInt64(v.AuxInt) != 7 {
22981			break
22982		}
22983		ptr := v_0
22984		mem := v_1
22985		v.reset(OpARM64MOVWstore)
22986		v.AuxInt = int32ToAuxInt(3)
22987		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
22988		v0.AuxInt = int64ToAuxInt(0)
22989		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
22990		v1.AddArg3(ptr, v0, mem)
22991		v.AddArg3(ptr, v0, v1)
22992		return true
22993	}
22994	// match: (Zero [8] ptr mem)
22995	// result: (MOVDstore ptr (MOVDconst [0]) mem)
22996	for {
22997		if auxIntToInt64(v.AuxInt) != 8 {
22998			break
22999		}
23000		ptr := v_0
23001		mem := v_1
23002		v.reset(OpARM64MOVDstore)
23003		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23004		v0.AuxInt = int64ToAuxInt(0)
23005		v.AddArg3(ptr, v0, mem)
23006		return true
23007	}
23008	// match: (Zero [9] ptr mem)
23009	// result: (MOVBstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
23010	for {
23011		if auxIntToInt64(v.AuxInt) != 9 {
23012			break
23013		}
23014		ptr := v_0
23015		mem := v_1
23016		v.reset(OpARM64MOVBstore)
23017		v.AuxInt = int32ToAuxInt(8)
23018		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23019		v0.AuxInt = int64ToAuxInt(0)
23020		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
23021		v1.AddArg3(ptr, v0, mem)
23022		v.AddArg3(ptr, v0, v1)
23023		return true
23024	}
23025	// match: (Zero [10] ptr mem)
23026	// result: (MOVHstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
23027	for {
23028		if auxIntToInt64(v.AuxInt) != 10 {
23029			break
23030		}
23031		ptr := v_0
23032		mem := v_1
23033		v.reset(OpARM64MOVHstore)
23034		v.AuxInt = int32ToAuxInt(8)
23035		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23036		v0.AuxInt = int64ToAuxInt(0)
23037		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
23038		v1.AddArg3(ptr, v0, mem)
23039		v.AddArg3(ptr, v0, v1)
23040		return true
23041	}
23042	// match: (Zero [11] ptr mem)
23043	// result: (MOVDstore [3] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
23044	for {
23045		if auxIntToInt64(v.AuxInt) != 11 {
23046			break
23047		}
23048		ptr := v_0
23049		mem := v_1
23050		v.reset(OpARM64MOVDstore)
23051		v.AuxInt = int32ToAuxInt(3)
23052		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23053		v0.AuxInt = int64ToAuxInt(0)
23054		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
23055		v1.AddArg3(ptr, v0, mem)
23056		v.AddArg3(ptr, v0, v1)
23057		return true
23058	}
23059	// match: (Zero [12] ptr mem)
23060	// result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
23061	for {
23062		if auxIntToInt64(v.AuxInt) != 12 {
23063			break
23064		}
23065		ptr := v_0
23066		mem := v_1
23067		v.reset(OpARM64MOVWstore)
23068		v.AuxInt = int32ToAuxInt(8)
23069		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23070		v0.AuxInt = int64ToAuxInt(0)
23071		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
23072		v1.AddArg3(ptr, v0, mem)
23073		v.AddArg3(ptr, v0, v1)
23074		return true
23075	}
23076	// match: (Zero [13] ptr mem)
23077	// result: (MOVDstore [5] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
23078	for {
23079		if auxIntToInt64(v.AuxInt) != 13 {
23080			break
23081		}
23082		ptr := v_0
23083		mem := v_1
23084		v.reset(OpARM64MOVDstore)
23085		v.AuxInt = int32ToAuxInt(5)
23086		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23087		v0.AuxInt = int64ToAuxInt(0)
23088		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
23089		v1.AddArg3(ptr, v0, mem)
23090		v.AddArg3(ptr, v0, v1)
23091		return true
23092	}
23093	// match: (Zero [14] ptr mem)
23094	// result: (MOVDstore [6] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
23095	for {
23096		if auxIntToInt64(v.AuxInt) != 14 {
23097			break
23098		}
23099		ptr := v_0
23100		mem := v_1
23101		v.reset(OpARM64MOVDstore)
23102		v.AuxInt = int32ToAuxInt(6)
23103		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23104		v0.AuxInt = int64ToAuxInt(0)
23105		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
23106		v1.AddArg3(ptr, v0, mem)
23107		v.AddArg3(ptr, v0, v1)
23108		return true
23109	}
23110	// match: (Zero [15] ptr mem)
23111	// result: (MOVDstore [7] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
23112	for {
23113		if auxIntToInt64(v.AuxInt) != 15 {
23114			break
23115		}
23116		ptr := v_0
23117		mem := v_1
23118		v.reset(OpARM64MOVDstore)
23119		v.AuxInt = int32ToAuxInt(7)
23120		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23121		v0.AuxInt = int64ToAuxInt(0)
23122		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
23123		v1.AddArg3(ptr, v0, mem)
23124		v.AddArg3(ptr, v0, v1)
23125		return true
23126	}
23127	// match: (Zero [16] ptr mem)
23128	// result: (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)
23129	for {
23130		if auxIntToInt64(v.AuxInt) != 16 {
23131			break
23132		}
23133		ptr := v_0
23134		mem := v_1
23135		v.reset(OpARM64STP)
23136		v.AuxInt = int32ToAuxInt(0)
23137		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23138		v0.AuxInt = int64ToAuxInt(0)
23139		v.AddArg4(ptr, v0, v0, mem)
23140		return true
23141	}
23142	// match: (Zero [32] ptr mem)
23143	// result: (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem))
23144	for {
23145		if auxIntToInt64(v.AuxInt) != 32 {
23146			break
23147		}
23148		ptr := v_0
23149		mem := v_1
23150		v.reset(OpARM64STP)
23151		v.AuxInt = int32ToAuxInt(16)
23152		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23153		v0.AuxInt = int64ToAuxInt(0)
23154		v1 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
23155		v1.AuxInt = int32ToAuxInt(0)
23156		v1.AddArg4(ptr, v0, v0, mem)
23157		v.AddArg4(ptr, v0, v0, v1)
23158		return true
23159	}
23160	// match: (Zero [48] ptr mem)
23161	// result: (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)))
23162	for {
23163		if auxIntToInt64(v.AuxInt) != 48 {
23164			break
23165		}
23166		ptr := v_0
23167		mem := v_1
23168		v.reset(OpARM64STP)
23169		v.AuxInt = int32ToAuxInt(32)
23170		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23171		v0.AuxInt = int64ToAuxInt(0)
23172		v1 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
23173		v1.AuxInt = int32ToAuxInt(16)
23174		v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
23175		v2.AuxInt = int32ToAuxInt(0)
23176		v2.AddArg4(ptr, v0, v0, mem)
23177		v1.AddArg4(ptr, v0, v0, v2)
23178		v.AddArg4(ptr, v0, v0, v1)
23179		return true
23180	}
23181	// match: (Zero [64] ptr mem)
23182	// result: (STP [48] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem))))
23183	for {
23184		if auxIntToInt64(v.AuxInt) != 64 {
23185			break
23186		}
23187		ptr := v_0
23188		mem := v_1
23189		v.reset(OpARM64STP)
23190		v.AuxInt = int32ToAuxInt(48)
23191		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23192		v0.AuxInt = int64ToAuxInt(0)
23193		v1 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
23194		v1.AuxInt = int32ToAuxInt(32)
23195		v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
23196		v2.AuxInt = int32ToAuxInt(16)
23197		v3 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
23198		v3.AuxInt = int32ToAuxInt(0)
23199		v3.AddArg4(ptr, v0, v0, mem)
23200		v2.AddArg4(ptr, v0, v0, v3)
23201		v1.AddArg4(ptr, v0, v0, v2)
23202		v.AddArg4(ptr, v0, v0, v1)
23203		return true
23204	}
23205	// match: (Zero [s] ptr mem)
23206	// cond: s%16 != 0 && s%16 <= 8 && s > 16
23207	// result: (Zero [8] (OffPtr <ptr.Type> ptr [s-8]) (Zero [s-s%16] ptr mem))
23208	for {
23209		s := auxIntToInt64(v.AuxInt)
23210		ptr := v_0
23211		mem := v_1
23212		if !(s%16 != 0 && s%16 <= 8 && s > 16) {
23213			break
23214		}
23215		v.reset(OpZero)
23216		v.AuxInt = int64ToAuxInt(8)
23217		v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
23218		v0.AuxInt = int64ToAuxInt(s - 8)
23219		v0.AddArg(ptr)
23220		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
23221		v1.AuxInt = int64ToAuxInt(s - s%16)
23222		v1.AddArg2(ptr, mem)
23223		v.AddArg2(v0, v1)
23224		return true
23225	}
23226	// match: (Zero [s] ptr mem)
23227	// cond: s%16 != 0 && s%16 > 8 && s > 16
23228	// result: (Zero [16] (OffPtr <ptr.Type> ptr [s-16]) (Zero [s-s%16] ptr mem))
23229	for {
23230		s := auxIntToInt64(v.AuxInt)
23231		ptr := v_0
23232		mem := v_1
23233		if !(s%16 != 0 && s%16 > 8 && s > 16) {
23234			break
23235		}
23236		v.reset(OpZero)
23237		v.AuxInt = int64ToAuxInt(16)
23238		v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
23239		v0.AuxInt = int64ToAuxInt(s - 16)
23240		v0.AddArg(ptr)
23241		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
23242		v1.AuxInt = int64ToAuxInt(s - s%16)
23243		v1.AddArg2(ptr, mem)
23244		v.AddArg2(v0, v1)
23245		return true
23246	}
23247	// match: (Zero [s] ptr mem)
23248	// cond: s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice
23249	// result: (DUFFZERO [4 * (64 - s/16)] ptr mem)
23250	for {
23251		s := auxIntToInt64(v.AuxInt)
23252		ptr := v_0
23253		mem := v_1
23254		if !(s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice) {
23255			break
23256		}
23257		v.reset(OpARM64DUFFZERO)
23258		v.AuxInt = int64ToAuxInt(4 * (64 - s/16))
23259		v.AddArg2(ptr, mem)
23260		return true
23261	}
23262	// match: (Zero [s] ptr mem)
23263	// cond: s%16 == 0 && (s > 16*64 || config.noDuffDevice)
23264	// result: (LoweredZero ptr (ADDconst <ptr.Type> [s-16] ptr) mem)
23265	for {
23266		s := auxIntToInt64(v.AuxInt)
23267		ptr := v_0
23268		mem := v_1
23269		if !(s%16 == 0 && (s > 16*64 || config.noDuffDevice)) {
23270			break
23271		}
23272		v.reset(OpARM64LoweredZero)
23273		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, ptr.Type)
23274		v0.AuxInt = int64ToAuxInt(s - 16)
23275		v0.AddArg(ptr)
23276		v.AddArg3(ptr, v0, mem)
23277		return true
23278	}
23279	return false
23280}
23281func rewriteBlockARM64(b *Block) bool {
23282	typ := &b.Func.Config.Types
23283	switch b.Kind {
23284	case BlockARM64EQ:
23285		// match: (EQ (CMPconst [0] z:(AND x y)) yes no)
23286		// cond: z.Uses == 1
23287		// result: (EQ (TST x y) yes no)
23288		for b.Controls[0].Op == OpARM64CMPconst {
23289			v_0 := b.Controls[0]
23290			if auxIntToInt64(v_0.AuxInt) != 0 {
23291				break
23292			}
23293			z := v_0.Args[0]
23294			if z.Op != OpARM64AND {
23295				break
23296			}
23297			_ = z.Args[1]
23298			z_0 := z.Args[0]
23299			z_1 := z.Args[1]
23300			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
23301				x := z_0
23302				y := z_1
23303				if !(z.Uses == 1) {
23304					continue
23305				}
23306				v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
23307				v0.AddArg2(x, y)
23308				b.resetWithControl(BlockARM64EQ, v0)
23309				return true
23310			}
23311			break
23312		}
23313		// match: (EQ (CMPconst [0] x:(ANDconst [c] y)) yes no)
23314		// cond: x.Uses == 1
23315		// result: (EQ (TSTconst [c] y) yes no)
23316		for b.Controls[0].Op == OpARM64CMPconst {
23317			v_0 := b.Controls[0]
23318			if auxIntToInt64(v_0.AuxInt) != 0 {
23319				break
23320			}
23321			x := v_0.Args[0]
23322			if x.Op != OpARM64ANDconst {
23323				break
23324			}
23325			c := auxIntToInt64(x.AuxInt)
23326			y := x.Args[0]
23327			if !(x.Uses == 1) {
23328				break
23329			}
23330			v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
23331			v0.AuxInt = int64ToAuxInt(c)
23332			v0.AddArg(y)
23333			b.resetWithControl(BlockARM64EQ, v0)
23334			return true
23335		}
23336		// match: (EQ (CMPWconst [0] z:(AND x y)) yes no)
23337		// cond: z.Uses == 1
23338		// result: (EQ (TSTW x y) yes no)
23339		for b.Controls[0].Op == OpARM64CMPWconst {
23340			v_0 := b.Controls[0]
23341			if auxIntToInt32(v_0.AuxInt) != 0 {
23342				break
23343			}
23344			z := v_0.Args[0]
23345			if z.Op != OpARM64AND {
23346				break
23347			}
23348			_ = z.Args[1]
23349			z_0 := z.Args[0]
23350			z_1 := z.Args[1]
23351			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
23352				x := z_0
23353				y := z_1
23354				if !(z.Uses == 1) {
23355					continue
23356				}
23357				v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
23358				v0.AddArg2(x, y)
23359				b.resetWithControl(BlockARM64EQ, v0)
23360				return true
23361			}
23362			break
23363		}
23364		// match: (EQ (CMPWconst [0] x:(ANDconst [c] y)) yes no)
23365		// cond: x.Uses == 1
23366		// result: (EQ (TSTWconst [int32(c)] y) yes no)
23367		for b.Controls[0].Op == OpARM64CMPWconst {
23368			v_0 := b.Controls[0]
23369			if auxIntToInt32(v_0.AuxInt) != 0 {
23370				break
23371			}
23372			x := v_0.Args[0]
23373			if x.Op != OpARM64ANDconst {
23374				break
23375			}
23376			c := auxIntToInt64(x.AuxInt)
23377			y := x.Args[0]
23378			if !(x.Uses == 1) {
23379				break
23380			}
23381			v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
23382			v0.AuxInt = int32ToAuxInt(int32(c))
23383			v0.AddArg(y)
23384			b.resetWithControl(BlockARM64EQ, v0)
23385			return true
23386		}
23387		// match: (EQ (CMPconst [0] x:(ADDconst [c] y)) yes no)
23388		// cond: x.Uses == 1
23389		// result: (EQ (CMNconst [c] y) yes no)
23390		for b.Controls[0].Op == OpARM64CMPconst {
23391			v_0 := b.Controls[0]
23392			if auxIntToInt64(v_0.AuxInt) != 0 {
23393				break
23394			}
23395			x := v_0.Args[0]
23396			if x.Op != OpARM64ADDconst {
23397				break
23398			}
23399			c := auxIntToInt64(x.AuxInt)
23400			y := x.Args[0]
23401			if !(x.Uses == 1) {
23402				break
23403			}
23404			v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
23405			v0.AuxInt = int64ToAuxInt(c)
23406			v0.AddArg(y)
23407			b.resetWithControl(BlockARM64EQ, v0)
23408			return true
23409		}
23410		// match: (EQ (CMPWconst [0] x:(ADDconst [c] y)) yes no)
23411		// cond: x.Uses == 1
23412		// result: (EQ (CMNWconst [int32(c)] y) yes no)
23413		for b.Controls[0].Op == OpARM64CMPWconst {
23414			v_0 := b.Controls[0]
23415			if auxIntToInt32(v_0.AuxInt) != 0 {
23416				break
23417			}
23418			x := v_0.Args[0]
23419			if x.Op != OpARM64ADDconst {
23420				break
23421			}
23422			c := auxIntToInt64(x.AuxInt)
23423			y := x.Args[0]
23424			if !(x.Uses == 1) {
23425				break
23426			}
23427			v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
23428			v0.AuxInt = int32ToAuxInt(int32(c))
23429			v0.AddArg(y)
23430			b.resetWithControl(BlockARM64EQ, v0)
23431			return true
23432		}
23433		// match: (EQ (CMPconst [0] z:(ADD x y)) yes no)
23434		// cond: z.Uses == 1
23435		// result: (EQ (CMN x y) yes no)
23436		for b.Controls[0].Op == OpARM64CMPconst {
23437			v_0 := b.Controls[0]
23438			if auxIntToInt64(v_0.AuxInt) != 0 {
23439				break
23440			}
23441			z := v_0.Args[0]
23442			if z.Op != OpARM64ADD {
23443				break
23444			}
23445			_ = z.Args[1]
23446			z_0 := z.Args[0]
23447			z_1 := z.Args[1]
23448			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
23449				x := z_0
23450				y := z_1
23451				if !(z.Uses == 1) {
23452					continue
23453				}
23454				v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
23455				v0.AddArg2(x, y)
23456				b.resetWithControl(BlockARM64EQ, v0)
23457				return true
23458			}
23459			break
23460		}
23461		// match: (EQ (CMPWconst [0] z:(ADD x y)) yes no)
23462		// cond: z.Uses == 1
23463		// result: (EQ (CMNW x y) yes no)
23464		for b.Controls[0].Op == OpARM64CMPWconst {
23465			v_0 := b.Controls[0]
23466			if auxIntToInt32(v_0.AuxInt) != 0 {
23467				break
23468			}
23469			z := v_0.Args[0]
23470			if z.Op != OpARM64ADD {
23471				break
23472			}
23473			_ = z.Args[1]
23474			z_0 := z.Args[0]
23475			z_1 := z.Args[1]
23476			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
23477				x := z_0
23478				y := z_1
23479				if !(z.Uses == 1) {
23480					continue
23481				}
23482				v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
23483				v0.AddArg2(x, y)
23484				b.resetWithControl(BlockARM64EQ, v0)
23485				return true
23486			}
23487			break
23488		}
23489		// match: (EQ (CMP x z:(NEG y)) yes no)
23490		// cond: z.Uses == 1
23491		// result: (EQ (CMN x y) yes no)
23492		for b.Controls[0].Op == OpARM64CMP {
23493			v_0 := b.Controls[0]
23494			_ = v_0.Args[1]
23495			x := v_0.Args[0]
23496			z := v_0.Args[1]
23497			if z.Op != OpARM64NEG {
23498				break
23499			}
23500			y := z.Args[0]
23501			if !(z.Uses == 1) {
23502				break
23503			}
23504			v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
23505			v0.AddArg2(x, y)
23506			b.resetWithControl(BlockARM64EQ, v0)
23507			return true
23508		}
23509		// match: (EQ (CMPW x z:(NEG y)) yes no)
23510		// cond: z.Uses == 1
23511		// result: (EQ (CMNW x y) yes no)
23512		for b.Controls[0].Op == OpARM64CMPW {
23513			v_0 := b.Controls[0]
23514			_ = v_0.Args[1]
23515			x := v_0.Args[0]
23516			z := v_0.Args[1]
23517			if z.Op != OpARM64NEG {
23518				break
23519			}
23520			y := z.Args[0]
23521			if !(z.Uses == 1) {
23522				break
23523			}
23524			v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
23525			v0.AddArg2(x, y)
23526			b.resetWithControl(BlockARM64EQ, v0)
23527			return true
23528		}
23529		// match: (EQ (CMPconst [0] x) yes no)
23530		// result: (Z x yes no)
23531		for b.Controls[0].Op == OpARM64CMPconst {
23532			v_0 := b.Controls[0]
23533			if auxIntToInt64(v_0.AuxInt) != 0 {
23534				break
23535			}
23536			x := v_0.Args[0]
23537			b.resetWithControl(BlockARM64Z, x)
23538			return true
23539		}
23540		// match: (EQ (CMPWconst [0] x) yes no)
23541		// result: (ZW x yes no)
23542		for b.Controls[0].Op == OpARM64CMPWconst {
23543			v_0 := b.Controls[0]
23544			if auxIntToInt32(v_0.AuxInt) != 0 {
23545				break
23546			}
23547			x := v_0.Args[0]
23548			b.resetWithControl(BlockARM64ZW, x)
23549			return true
23550		}
23551		// match: (EQ (CMPconst [0] z:(MADD a x y)) yes no)
23552		// cond: z.Uses==1
23553		// result: (EQ (CMN a (MUL <x.Type> x y)) yes no)
23554		for b.Controls[0].Op == OpARM64CMPconst {
23555			v_0 := b.Controls[0]
23556			if auxIntToInt64(v_0.AuxInt) != 0 {
23557				break
23558			}
23559			z := v_0.Args[0]
23560			if z.Op != OpARM64MADD {
23561				break
23562			}
23563			y := z.Args[2]
23564			a := z.Args[0]
23565			x := z.Args[1]
23566			if !(z.Uses == 1) {
23567				break
23568			}
23569			v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
23570			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
23571			v1.AddArg2(x, y)
23572			v0.AddArg2(a, v1)
23573			b.resetWithControl(BlockARM64EQ, v0)
23574			return true
23575		}
23576		// match: (EQ (CMPconst [0] z:(MSUB a x y)) yes no)
23577		// cond: z.Uses==1
23578		// result: (EQ (CMP a (MUL <x.Type> x y)) yes no)
23579		for b.Controls[0].Op == OpARM64CMPconst {
23580			v_0 := b.Controls[0]
23581			if auxIntToInt64(v_0.AuxInt) != 0 {
23582				break
23583			}
23584			z := v_0.Args[0]
23585			if z.Op != OpARM64MSUB {
23586				break
23587			}
23588			y := z.Args[2]
23589			a := z.Args[0]
23590			x := z.Args[1]
23591			if !(z.Uses == 1) {
23592				break
23593			}
23594			v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
23595			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
23596			v1.AddArg2(x, y)
23597			v0.AddArg2(a, v1)
23598			b.resetWithControl(BlockARM64EQ, v0)
23599			return true
23600		}
23601		// match: (EQ (CMPWconst [0] z:(MADDW a x y)) yes no)
23602		// cond: z.Uses==1
23603		// result: (EQ (CMNW a (MULW <x.Type> x y)) yes no)
23604		for b.Controls[0].Op == OpARM64CMPWconst {
23605			v_0 := b.Controls[0]
23606			if auxIntToInt32(v_0.AuxInt) != 0 {
23607				break
23608			}
23609			z := v_0.Args[0]
23610			if z.Op != OpARM64MADDW {
23611				break
23612			}
23613			y := z.Args[2]
23614			a := z.Args[0]
23615			x := z.Args[1]
23616			if !(z.Uses == 1) {
23617				break
23618			}
23619			v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
23620			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
23621			v1.AddArg2(x, y)
23622			v0.AddArg2(a, v1)
23623			b.resetWithControl(BlockARM64EQ, v0)
23624			return true
23625		}
23626		// match: (EQ (CMPWconst [0] z:(MSUBW a x y)) yes no)
23627		// cond: z.Uses==1
23628		// result: (EQ (CMPW a (MULW <x.Type> x y)) yes no)
23629		for b.Controls[0].Op == OpARM64CMPWconst {
23630			v_0 := b.Controls[0]
23631			if auxIntToInt32(v_0.AuxInt) != 0 {
23632				break
23633			}
23634			z := v_0.Args[0]
23635			if z.Op != OpARM64MSUBW {
23636				break
23637			}
23638			y := z.Args[2]
23639			a := z.Args[0]
23640			x := z.Args[1]
23641			if !(z.Uses == 1) {
23642				break
23643			}
23644			v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
23645			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
23646			v1.AddArg2(x, y)
23647			v0.AddArg2(a, v1)
23648			b.resetWithControl(BlockARM64EQ, v0)
23649			return true
23650		}
23651		// match: (EQ (TSTconst [c] x) yes no)
23652		// cond: oneBit(c)
23653		// result: (TBZ [int64(ntz64(c))] x yes no)
23654		for b.Controls[0].Op == OpARM64TSTconst {
23655			v_0 := b.Controls[0]
23656			c := auxIntToInt64(v_0.AuxInt)
23657			x := v_0.Args[0]
23658			if !(oneBit(c)) {
23659				break
23660			}
23661			b.resetWithControl(BlockARM64TBZ, x)
23662			b.AuxInt = int64ToAuxInt(int64(ntz64(c)))
23663			return true
23664		}
23665		// match: (EQ (TSTWconst [c] x) yes no)
23666		// cond: oneBit(int64(uint32(c)))
23667		// result: (TBZ [int64(ntz64(int64(uint32(c))))] x yes no)
23668		for b.Controls[0].Op == OpARM64TSTWconst {
23669			v_0 := b.Controls[0]
23670			c := auxIntToInt32(v_0.AuxInt)
23671			x := v_0.Args[0]
23672			if !(oneBit(int64(uint32(c)))) {
23673				break
23674			}
23675			b.resetWithControl(BlockARM64TBZ, x)
23676			b.AuxInt = int64ToAuxInt(int64(ntz64(int64(uint32(c)))))
23677			return true
23678		}
23679		// match: (EQ (FlagConstant [fc]) yes no)
23680		// cond: fc.eq()
23681		// result: (First yes no)
23682		for b.Controls[0].Op == OpARM64FlagConstant {
23683			v_0 := b.Controls[0]
23684			fc := auxIntToFlagConstant(v_0.AuxInt)
23685			if !(fc.eq()) {
23686				break
23687			}
23688			b.Reset(BlockFirst)
23689			return true
23690		}
23691		// match: (EQ (FlagConstant [fc]) yes no)
23692		// cond: !fc.eq()
23693		// result: (First no yes)
23694		for b.Controls[0].Op == OpARM64FlagConstant {
23695			v_0 := b.Controls[0]
23696			fc := auxIntToFlagConstant(v_0.AuxInt)
23697			if !(!fc.eq()) {
23698				break
23699			}
23700			b.Reset(BlockFirst)
23701			b.swapSuccessors()
23702			return true
23703		}
23704		// match: (EQ (InvertFlags cmp) yes no)
23705		// result: (EQ cmp yes no)
23706		for b.Controls[0].Op == OpARM64InvertFlags {
23707			v_0 := b.Controls[0]
23708			cmp := v_0.Args[0]
23709			b.resetWithControl(BlockARM64EQ, cmp)
23710			return true
23711		}
23712	case BlockARM64FGE:
23713		// match: (FGE (InvertFlags cmp) yes no)
23714		// result: (FLE cmp yes no)
23715		for b.Controls[0].Op == OpARM64InvertFlags {
23716			v_0 := b.Controls[0]
23717			cmp := v_0.Args[0]
23718			b.resetWithControl(BlockARM64FLE, cmp)
23719			return true
23720		}
23721	case BlockARM64FGT:
23722		// match: (FGT (InvertFlags cmp) yes no)
23723		// result: (FLT cmp yes no)
23724		for b.Controls[0].Op == OpARM64InvertFlags {
23725			v_0 := b.Controls[0]
23726			cmp := v_0.Args[0]
23727			b.resetWithControl(BlockARM64FLT, cmp)
23728			return true
23729		}
23730	case BlockARM64FLE:
23731		// match: (FLE (InvertFlags cmp) yes no)
23732		// result: (FGE cmp yes no)
23733		for b.Controls[0].Op == OpARM64InvertFlags {
23734			v_0 := b.Controls[0]
23735			cmp := v_0.Args[0]
23736			b.resetWithControl(BlockARM64FGE, cmp)
23737			return true
23738		}
23739	case BlockARM64FLT:
23740		// match: (FLT (InvertFlags cmp) yes no)
23741		// result: (FGT cmp yes no)
23742		for b.Controls[0].Op == OpARM64InvertFlags {
23743			v_0 := b.Controls[0]
23744			cmp := v_0.Args[0]
23745			b.resetWithControl(BlockARM64FGT, cmp)
23746			return true
23747		}
23748	case BlockARM64GE:
23749		// match: (GE (CMPconst [0] z:(AND x y)) yes no)
23750		// cond: z.Uses == 1
23751		// result: (GE (TST x y) yes no)
23752		for b.Controls[0].Op == OpARM64CMPconst {
23753			v_0 := b.Controls[0]
23754			if auxIntToInt64(v_0.AuxInt) != 0 {
23755				break
23756			}
23757			z := v_0.Args[0]
23758			if z.Op != OpARM64AND {
23759				break
23760			}
23761			_ = z.Args[1]
23762			z_0 := z.Args[0]
23763			z_1 := z.Args[1]
23764			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
23765				x := z_0
23766				y := z_1
23767				if !(z.Uses == 1) {
23768					continue
23769				}
23770				v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
23771				v0.AddArg2(x, y)
23772				b.resetWithControl(BlockARM64GE, v0)
23773				return true
23774			}
23775			break
23776		}
23777		// match: (GE (CMPconst [0] x:(ANDconst [c] y)) yes no)
23778		// cond: x.Uses == 1
23779		// result: (GE (TSTconst [c] y) yes no)
23780		for b.Controls[0].Op == OpARM64CMPconst {
23781			v_0 := b.Controls[0]
23782			if auxIntToInt64(v_0.AuxInt) != 0 {
23783				break
23784			}
23785			x := v_0.Args[0]
23786			if x.Op != OpARM64ANDconst {
23787				break
23788			}
23789			c := auxIntToInt64(x.AuxInt)
23790			y := x.Args[0]
23791			if !(x.Uses == 1) {
23792				break
23793			}
23794			v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
23795			v0.AuxInt = int64ToAuxInt(c)
23796			v0.AddArg(y)
23797			b.resetWithControl(BlockARM64GE, v0)
23798			return true
23799		}
23800		// match: (GE (CMPWconst [0] z:(AND x y)) yes no)
23801		// cond: z.Uses == 1
23802		// result: (GE (TSTW x y) yes no)
23803		for b.Controls[0].Op == OpARM64CMPWconst {
23804			v_0 := b.Controls[0]
23805			if auxIntToInt32(v_0.AuxInt) != 0 {
23806				break
23807			}
23808			z := v_0.Args[0]
23809			if z.Op != OpARM64AND {
23810				break
23811			}
23812			_ = z.Args[1]
23813			z_0 := z.Args[0]
23814			z_1 := z.Args[1]
23815			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
23816				x := z_0
23817				y := z_1
23818				if !(z.Uses == 1) {
23819					continue
23820				}
23821				v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
23822				v0.AddArg2(x, y)
23823				b.resetWithControl(BlockARM64GE, v0)
23824				return true
23825			}
23826			break
23827		}
23828		// match: (GE (CMPWconst [0] x:(ANDconst [c] y)) yes no)
23829		// cond: x.Uses == 1
23830		// result: (GE (TSTWconst [int32(c)] y) yes no)
23831		for b.Controls[0].Op == OpARM64CMPWconst {
23832			v_0 := b.Controls[0]
23833			if auxIntToInt32(v_0.AuxInt) != 0 {
23834				break
23835			}
23836			x := v_0.Args[0]
23837			if x.Op != OpARM64ANDconst {
23838				break
23839			}
23840			c := auxIntToInt64(x.AuxInt)
23841			y := x.Args[0]
23842			if !(x.Uses == 1) {
23843				break
23844			}
23845			v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
23846			v0.AuxInt = int32ToAuxInt(int32(c))
23847			v0.AddArg(y)
23848			b.resetWithControl(BlockARM64GE, v0)
23849			return true
23850		}
23851		// match: (GE (CMPconst [0] x:(ADDconst [c] y)) yes no)
23852		// cond: x.Uses == 1
23853		// result: (GEnoov (CMNconst [c] y) yes no)
23854		for b.Controls[0].Op == OpARM64CMPconst {
23855			v_0 := b.Controls[0]
23856			if auxIntToInt64(v_0.AuxInt) != 0 {
23857				break
23858			}
23859			x := v_0.Args[0]
23860			if x.Op != OpARM64ADDconst {
23861				break
23862			}
23863			c := auxIntToInt64(x.AuxInt)
23864			y := x.Args[0]
23865			if !(x.Uses == 1) {
23866				break
23867			}
23868			v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
23869			v0.AuxInt = int64ToAuxInt(c)
23870			v0.AddArg(y)
23871			b.resetWithControl(BlockARM64GEnoov, v0)
23872			return true
23873		}
23874		// match: (GE (CMPWconst [0] x:(ADDconst [c] y)) yes no)
23875		// cond: x.Uses == 1
23876		// result: (GEnoov (CMNWconst [int32(c)] y) yes no)
23877		for b.Controls[0].Op == OpARM64CMPWconst {
23878			v_0 := b.Controls[0]
23879			if auxIntToInt32(v_0.AuxInt) != 0 {
23880				break
23881			}
23882			x := v_0.Args[0]
23883			if x.Op != OpARM64ADDconst {
23884				break
23885			}
23886			c := auxIntToInt64(x.AuxInt)
23887			y := x.Args[0]
23888			if !(x.Uses == 1) {
23889				break
23890			}
23891			v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
23892			v0.AuxInt = int32ToAuxInt(int32(c))
23893			v0.AddArg(y)
23894			b.resetWithControl(BlockARM64GEnoov, v0)
23895			return true
23896		}
23897		// match: (GE (CMPconst [0] z:(ADD x y)) yes no)
23898		// cond: z.Uses == 1
23899		// result: (GEnoov (CMN x y) yes no)
23900		for b.Controls[0].Op == OpARM64CMPconst {
23901			v_0 := b.Controls[0]
23902			if auxIntToInt64(v_0.AuxInt) != 0 {
23903				break
23904			}
23905			z := v_0.Args[0]
23906			if z.Op != OpARM64ADD {
23907				break
23908			}
23909			_ = z.Args[1]
23910			z_0 := z.Args[0]
23911			z_1 := z.Args[1]
23912			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
23913				x := z_0
23914				y := z_1
23915				if !(z.Uses == 1) {
23916					continue
23917				}
23918				v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
23919				v0.AddArg2(x, y)
23920				b.resetWithControl(BlockARM64GEnoov, v0)
23921				return true
23922			}
23923			break
23924		}
23925		// match: (GE (CMPWconst [0] z:(ADD x y)) yes no)
23926		// cond: z.Uses == 1
23927		// result: (GEnoov (CMNW x y) yes no)
23928		for b.Controls[0].Op == OpARM64CMPWconst {
23929			v_0 := b.Controls[0]
23930			if auxIntToInt32(v_0.AuxInt) != 0 {
23931				break
23932			}
23933			z := v_0.Args[0]
23934			if z.Op != OpARM64ADD {
23935				break
23936			}
23937			_ = z.Args[1]
23938			z_0 := z.Args[0]
23939			z_1 := z.Args[1]
23940			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
23941				x := z_0
23942				y := z_1
23943				if !(z.Uses == 1) {
23944					continue
23945				}
23946				v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
23947				v0.AddArg2(x, y)
23948				b.resetWithControl(BlockARM64GEnoov, v0)
23949				return true
23950			}
23951			break
23952		}
23953		// match: (GE (CMPconst [0] z:(MADD a x y)) yes no)
23954		// cond: z.Uses==1
23955		// result: (GEnoov (CMN a (MUL <x.Type> x y)) yes no)
23956		for b.Controls[0].Op == OpARM64CMPconst {
23957			v_0 := b.Controls[0]
23958			if auxIntToInt64(v_0.AuxInt) != 0 {
23959				break
23960			}
23961			z := v_0.Args[0]
23962			if z.Op != OpARM64MADD {
23963				break
23964			}
23965			y := z.Args[2]
23966			a := z.Args[0]
23967			x := z.Args[1]
23968			if !(z.Uses == 1) {
23969				break
23970			}
23971			v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
23972			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
23973			v1.AddArg2(x, y)
23974			v0.AddArg2(a, v1)
23975			b.resetWithControl(BlockARM64GEnoov, v0)
23976			return true
23977		}
23978		// match: (GE (CMPconst [0] z:(MSUB a x y)) yes no)
23979		// cond: z.Uses==1
23980		// result: (GEnoov (CMP a (MUL <x.Type> x y)) yes no)
23981		for b.Controls[0].Op == OpARM64CMPconst {
23982			v_0 := b.Controls[0]
23983			if auxIntToInt64(v_0.AuxInt) != 0 {
23984				break
23985			}
23986			z := v_0.Args[0]
23987			if z.Op != OpARM64MSUB {
23988				break
23989			}
23990			y := z.Args[2]
23991			a := z.Args[0]
23992			x := z.Args[1]
23993			if !(z.Uses == 1) {
23994				break
23995			}
23996			v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
23997			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
23998			v1.AddArg2(x, y)
23999			v0.AddArg2(a, v1)
24000			b.resetWithControl(BlockARM64GEnoov, v0)
24001			return true
24002		}
24003		// match: (GE (CMPWconst [0] z:(MADDW a x y)) yes no)
24004		// cond: z.Uses==1
24005		// result: (GEnoov (CMNW a (MULW <x.Type> x y)) yes no)
24006		for b.Controls[0].Op == OpARM64CMPWconst {
24007			v_0 := b.Controls[0]
24008			if auxIntToInt32(v_0.AuxInt) != 0 {
24009				break
24010			}
24011			z := v_0.Args[0]
24012			if z.Op != OpARM64MADDW {
24013				break
24014			}
24015			y := z.Args[2]
24016			a := z.Args[0]
24017			x := z.Args[1]
24018			if !(z.Uses == 1) {
24019				break
24020			}
24021			v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
24022			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
24023			v1.AddArg2(x, y)
24024			v0.AddArg2(a, v1)
24025			b.resetWithControl(BlockARM64GEnoov, v0)
24026			return true
24027		}
24028		// match: (GE (CMPWconst [0] z:(MSUBW a x y)) yes no)
24029		// cond: z.Uses==1
24030		// result: (GEnoov (CMPW a (MULW <x.Type> x y)) yes no)
24031		for b.Controls[0].Op == OpARM64CMPWconst {
24032			v_0 := b.Controls[0]
24033			if auxIntToInt32(v_0.AuxInt) != 0 {
24034				break
24035			}
24036			z := v_0.Args[0]
24037			if z.Op != OpARM64MSUBW {
24038				break
24039			}
24040			y := z.Args[2]
24041			a := z.Args[0]
24042			x := z.Args[1]
24043			if !(z.Uses == 1) {
24044				break
24045			}
24046			v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
24047			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
24048			v1.AddArg2(x, y)
24049			v0.AddArg2(a, v1)
24050			b.resetWithControl(BlockARM64GEnoov, v0)
24051			return true
24052		}
24053		// match: (GE (CMPWconst [0] x) yes no)
24054		// result: (TBZ [31] x yes no)
24055		for b.Controls[0].Op == OpARM64CMPWconst {
24056			v_0 := b.Controls[0]
24057			if auxIntToInt32(v_0.AuxInt) != 0 {
24058				break
24059			}
24060			x := v_0.Args[0]
24061			b.resetWithControl(BlockARM64TBZ, x)
24062			b.AuxInt = int64ToAuxInt(31)
24063			return true
24064		}
24065		// match: (GE (CMPconst [0] x) yes no)
24066		// result: (TBZ [63] x yes no)
24067		for b.Controls[0].Op == OpARM64CMPconst {
24068			v_0 := b.Controls[0]
24069			if auxIntToInt64(v_0.AuxInt) != 0 {
24070				break
24071			}
24072			x := v_0.Args[0]
24073			b.resetWithControl(BlockARM64TBZ, x)
24074			b.AuxInt = int64ToAuxInt(63)
24075			return true
24076		}
24077		// match: (GE (FlagConstant [fc]) yes no)
24078		// cond: fc.ge()
24079		// result: (First yes no)
24080		for b.Controls[0].Op == OpARM64FlagConstant {
24081			v_0 := b.Controls[0]
24082			fc := auxIntToFlagConstant(v_0.AuxInt)
24083			if !(fc.ge()) {
24084				break
24085			}
24086			b.Reset(BlockFirst)
24087			return true
24088		}
24089		// match: (GE (FlagConstant [fc]) yes no)
24090		// cond: !fc.ge()
24091		// result: (First no yes)
24092		for b.Controls[0].Op == OpARM64FlagConstant {
24093			v_0 := b.Controls[0]
24094			fc := auxIntToFlagConstant(v_0.AuxInt)
24095			if !(!fc.ge()) {
24096				break
24097			}
24098			b.Reset(BlockFirst)
24099			b.swapSuccessors()
24100			return true
24101		}
24102		// match: (GE (InvertFlags cmp) yes no)
24103		// result: (LE cmp yes no)
24104		for b.Controls[0].Op == OpARM64InvertFlags {
24105			v_0 := b.Controls[0]
24106			cmp := v_0.Args[0]
24107			b.resetWithControl(BlockARM64LE, cmp)
24108			return true
24109		}
24110	case BlockARM64GEnoov:
24111		// match: (GEnoov (FlagConstant [fc]) yes no)
24112		// cond: fc.geNoov()
24113		// result: (First yes no)
24114		for b.Controls[0].Op == OpARM64FlagConstant {
24115			v_0 := b.Controls[0]
24116			fc := auxIntToFlagConstant(v_0.AuxInt)
24117			if !(fc.geNoov()) {
24118				break
24119			}
24120			b.Reset(BlockFirst)
24121			return true
24122		}
24123		// match: (GEnoov (FlagConstant [fc]) yes no)
24124		// cond: !fc.geNoov()
24125		// result: (First no yes)
24126		for b.Controls[0].Op == OpARM64FlagConstant {
24127			v_0 := b.Controls[0]
24128			fc := auxIntToFlagConstant(v_0.AuxInt)
24129			if !(!fc.geNoov()) {
24130				break
24131			}
24132			b.Reset(BlockFirst)
24133			b.swapSuccessors()
24134			return true
24135		}
24136		// match: (GEnoov (InvertFlags cmp) yes no)
24137		// result: (LEnoov cmp yes no)
24138		for b.Controls[0].Op == OpARM64InvertFlags {
24139			v_0 := b.Controls[0]
24140			cmp := v_0.Args[0]
24141			b.resetWithControl(BlockARM64LEnoov, cmp)
24142			return true
24143		}
24144	case BlockARM64GT:
24145		// match: (GT (CMPconst [0] z:(AND x y)) yes no)
24146		// cond: z.Uses == 1
24147		// result: (GT (TST x y) yes no)
24148		for b.Controls[0].Op == OpARM64CMPconst {
24149			v_0 := b.Controls[0]
24150			if auxIntToInt64(v_0.AuxInt) != 0 {
24151				break
24152			}
24153			z := v_0.Args[0]
24154			if z.Op != OpARM64AND {
24155				break
24156			}
24157			_ = z.Args[1]
24158			z_0 := z.Args[0]
24159			z_1 := z.Args[1]
24160			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
24161				x := z_0
24162				y := z_1
24163				if !(z.Uses == 1) {
24164					continue
24165				}
24166				v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
24167				v0.AddArg2(x, y)
24168				b.resetWithControl(BlockARM64GT, v0)
24169				return true
24170			}
24171			break
24172		}
24173		// match: (GT (CMPconst [0] x:(ANDconst [c] y)) yes no)
24174		// cond: x.Uses == 1
24175		// result: (GT (TSTconst [c] y) yes no)
24176		for b.Controls[0].Op == OpARM64CMPconst {
24177			v_0 := b.Controls[0]
24178			if auxIntToInt64(v_0.AuxInt) != 0 {
24179				break
24180			}
24181			x := v_0.Args[0]
24182			if x.Op != OpARM64ANDconst {
24183				break
24184			}
24185			c := auxIntToInt64(x.AuxInt)
24186			y := x.Args[0]
24187			if !(x.Uses == 1) {
24188				break
24189			}
24190			v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
24191			v0.AuxInt = int64ToAuxInt(c)
24192			v0.AddArg(y)
24193			b.resetWithControl(BlockARM64GT, v0)
24194			return true
24195		}
24196		// match: (GT (CMPWconst [0] z:(AND x y)) yes no)
24197		// cond: z.Uses == 1
24198		// result: (GT (TSTW x y) yes no)
24199		for b.Controls[0].Op == OpARM64CMPWconst {
24200			v_0 := b.Controls[0]
24201			if auxIntToInt32(v_0.AuxInt) != 0 {
24202				break
24203			}
24204			z := v_0.Args[0]
24205			if z.Op != OpARM64AND {
24206				break
24207			}
24208			_ = z.Args[1]
24209			z_0 := z.Args[0]
24210			z_1 := z.Args[1]
24211			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
24212				x := z_0
24213				y := z_1
24214				if !(z.Uses == 1) {
24215					continue
24216				}
24217				v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
24218				v0.AddArg2(x, y)
24219				b.resetWithControl(BlockARM64GT, v0)
24220				return true
24221			}
24222			break
24223		}
24224		// match: (GT (CMPWconst [0] x:(ANDconst [c] y)) yes no)
24225		// cond: x.Uses == 1
24226		// result: (GT (TSTWconst [int32(c)] y) yes no)
24227		for b.Controls[0].Op == OpARM64CMPWconst {
24228			v_0 := b.Controls[0]
24229			if auxIntToInt32(v_0.AuxInt) != 0 {
24230				break
24231			}
24232			x := v_0.Args[0]
24233			if x.Op != OpARM64ANDconst {
24234				break
24235			}
24236			c := auxIntToInt64(x.AuxInt)
24237			y := x.Args[0]
24238			if !(x.Uses == 1) {
24239				break
24240			}
24241			v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
24242			v0.AuxInt = int32ToAuxInt(int32(c))
24243			v0.AddArg(y)
24244			b.resetWithControl(BlockARM64GT, v0)
24245			return true
24246		}
24247		// match: (GT (CMPconst [0] x:(ADDconst [c] y)) yes no)
24248		// cond: x.Uses == 1
24249		// result: (GTnoov (CMNconst [c] y) yes no)
24250		for b.Controls[0].Op == OpARM64CMPconst {
24251			v_0 := b.Controls[0]
24252			if auxIntToInt64(v_0.AuxInt) != 0 {
24253				break
24254			}
24255			x := v_0.Args[0]
24256			if x.Op != OpARM64ADDconst {
24257				break
24258			}
24259			c := auxIntToInt64(x.AuxInt)
24260			y := x.Args[0]
24261			if !(x.Uses == 1) {
24262				break
24263			}
24264			v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
24265			v0.AuxInt = int64ToAuxInt(c)
24266			v0.AddArg(y)
24267			b.resetWithControl(BlockARM64GTnoov, v0)
24268			return true
24269		}
24270		// match: (GT (CMPWconst [0] x:(ADDconst [c] y)) yes no)
24271		// cond: x.Uses == 1
24272		// result: (GTnoov (CMNWconst [int32(c)] y) yes no)
24273		for b.Controls[0].Op == OpARM64CMPWconst {
24274			v_0 := b.Controls[0]
24275			if auxIntToInt32(v_0.AuxInt) != 0 {
24276				break
24277			}
24278			x := v_0.Args[0]
24279			if x.Op != OpARM64ADDconst {
24280				break
24281			}
24282			c := auxIntToInt64(x.AuxInt)
24283			y := x.Args[0]
24284			if !(x.Uses == 1) {
24285				break
24286			}
24287			v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
24288			v0.AuxInt = int32ToAuxInt(int32(c))
24289			v0.AddArg(y)
24290			b.resetWithControl(BlockARM64GTnoov, v0)
24291			return true
24292		}
24293		// match: (GT (CMPconst [0] z:(ADD x y)) yes no)
24294		// cond: z.Uses == 1
24295		// result: (GTnoov (CMN x y) yes no)
24296		for b.Controls[0].Op == OpARM64CMPconst {
24297			v_0 := b.Controls[0]
24298			if auxIntToInt64(v_0.AuxInt) != 0 {
24299				break
24300			}
24301			z := v_0.Args[0]
24302			if z.Op != OpARM64ADD {
24303				break
24304			}
24305			_ = z.Args[1]
24306			z_0 := z.Args[0]
24307			z_1 := z.Args[1]
24308			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
24309				x := z_0
24310				y := z_1
24311				if !(z.Uses == 1) {
24312					continue
24313				}
24314				v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
24315				v0.AddArg2(x, y)
24316				b.resetWithControl(BlockARM64GTnoov, v0)
24317				return true
24318			}
24319			break
24320		}
24321		// match: (GT (CMPWconst [0] z:(ADD x y)) yes no)
24322		// cond: z.Uses == 1
24323		// result: (GTnoov (CMNW x y) yes no)
24324		for b.Controls[0].Op == OpARM64CMPWconst {
24325			v_0 := b.Controls[0]
24326			if auxIntToInt32(v_0.AuxInt) != 0 {
24327				break
24328			}
24329			z := v_0.Args[0]
24330			if z.Op != OpARM64ADD {
24331				break
24332			}
24333			_ = z.Args[1]
24334			z_0 := z.Args[0]
24335			z_1 := z.Args[1]
24336			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
24337				x := z_0
24338				y := z_1
24339				if !(z.Uses == 1) {
24340					continue
24341				}
24342				v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
24343				v0.AddArg2(x, y)
24344				b.resetWithControl(BlockARM64GTnoov, v0)
24345				return true
24346			}
24347			break
24348		}
24349		// match: (GT (CMPconst [0] z:(MADD a x y)) yes no)
24350		// cond: z.Uses==1
24351		// result: (GTnoov (CMN a (MUL <x.Type> x y)) yes no)
24352		for b.Controls[0].Op == OpARM64CMPconst {
24353			v_0 := b.Controls[0]
24354			if auxIntToInt64(v_0.AuxInt) != 0 {
24355				break
24356			}
24357			z := v_0.Args[0]
24358			if z.Op != OpARM64MADD {
24359				break
24360			}
24361			y := z.Args[2]
24362			a := z.Args[0]
24363			x := z.Args[1]
24364			if !(z.Uses == 1) {
24365				break
24366			}
24367			v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
24368			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
24369			v1.AddArg2(x, y)
24370			v0.AddArg2(a, v1)
24371			b.resetWithControl(BlockARM64GTnoov, v0)
24372			return true
24373		}
24374		// match: (GT (CMPconst [0] z:(MSUB a x y)) yes no)
24375		// cond: z.Uses==1
24376		// result: (GTnoov (CMP a (MUL <x.Type> x y)) yes no)
24377		for b.Controls[0].Op == OpARM64CMPconst {
24378			v_0 := b.Controls[0]
24379			if auxIntToInt64(v_0.AuxInt) != 0 {
24380				break
24381			}
24382			z := v_0.Args[0]
24383			if z.Op != OpARM64MSUB {
24384				break
24385			}
24386			y := z.Args[2]
24387			a := z.Args[0]
24388			x := z.Args[1]
24389			if !(z.Uses == 1) {
24390				break
24391			}
24392			v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
24393			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
24394			v1.AddArg2(x, y)
24395			v0.AddArg2(a, v1)
24396			b.resetWithControl(BlockARM64GTnoov, v0)
24397			return true
24398		}
24399		// match: (GT (CMPWconst [0] z:(MADDW a x y)) yes no)
24400		// cond: z.Uses==1
24401		// result: (GTnoov (CMNW a (MULW <x.Type> x y)) yes no)
24402		for b.Controls[0].Op == OpARM64CMPWconst {
24403			v_0 := b.Controls[0]
24404			if auxIntToInt32(v_0.AuxInt) != 0 {
24405				break
24406			}
24407			z := v_0.Args[0]
24408			if z.Op != OpARM64MADDW {
24409				break
24410			}
24411			y := z.Args[2]
24412			a := z.Args[0]
24413			x := z.Args[1]
24414			if !(z.Uses == 1) {
24415				break
24416			}
24417			v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
24418			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
24419			v1.AddArg2(x, y)
24420			v0.AddArg2(a, v1)
24421			b.resetWithControl(BlockARM64GTnoov, v0)
24422			return true
24423		}
24424		// match: (GT (CMPWconst [0] z:(MSUBW a x y)) yes no)
24425		// cond: z.Uses==1
24426		// result: (GTnoov (CMPW a (MULW <x.Type> x y)) yes no)
24427		for b.Controls[0].Op == OpARM64CMPWconst {
24428			v_0 := b.Controls[0]
24429			if auxIntToInt32(v_0.AuxInt) != 0 {
24430				break
24431			}
24432			z := v_0.Args[0]
24433			if z.Op != OpARM64MSUBW {
24434				break
24435			}
24436			y := z.Args[2]
24437			a := z.Args[0]
24438			x := z.Args[1]
24439			if !(z.Uses == 1) {
24440				break
24441			}
24442			v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
24443			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
24444			v1.AddArg2(x, y)
24445			v0.AddArg2(a, v1)
24446			b.resetWithControl(BlockARM64GTnoov, v0)
24447			return true
24448		}
24449		// match: (GT (FlagConstant [fc]) yes no)
24450		// cond: fc.gt()
24451		// result: (First yes no)
24452		for b.Controls[0].Op == OpARM64FlagConstant {
24453			v_0 := b.Controls[0]
24454			fc := auxIntToFlagConstant(v_0.AuxInt)
24455			if !(fc.gt()) {
24456				break
24457			}
24458			b.Reset(BlockFirst)
24459			return true
24460		}
24461		// match: (GT (FlagConstant [fc]) yes no)
24462		// cond: !fc.gt()
24463		// result: (First no yes)
24464		for b.Controls[0].Op == OpARM64FlagConstant {
24465			v_0 := b.Controls[0]
24466			fc := auxIntToFlagConstant(v_0.AuxInt)
24467			if !(!fc.gt()) {
24468				break
24469			}
24470			b.Reset(BlockFirst)
24471			b.swapSuccessors()
24472			return true
24473		}
24474		// match: (GT (InvertFlags cmp) yes no)
24475		// result: (LT cmp yes no)
24476		for b.Controls[0].Op == OpARM64InvertFlags {
24477			v_0 := b.Controls[0]
24478			cmp := v_0.Args[0]
24479			b.resetWithControl(BlockARM64LT, cmp)
24480			return true
24481		}
24482	case BlockARM64GTnoov:
24483		// match: (GTnoov (FlagConstant [fc]) yes no)
24484		// cond: fc.gtNoov()
24485		// result: (First yes no)
24486		for b.Controls[0].Op == OpARM64FlagConstant {
24487			v_0 := b.Controls[0]
24488			fc := auxIntToFlagConstant(v_0.AuxInt)
24489			if !(fc.gtNoov()) {
24490				break
24491			}
24492			b.Reset(BlockFirst)
24493			return true
24494		}
24495		// match: (GTnoov (FlagConstant [fc]) yes no)
24496		// cond: !fc.gtNoov()
24497		// result: (First no yes)
24498		for b.Controls[0].Op == OpARM64FlagConstant {
24499			v_0 := b.Controls[0]
24500			fc := auxIntToFlagConstant(v_0.AuxInt)
24501			if !(!fc.gtNoov()) {
24502				break
24503			}
24504			b.Reset(BlockFirst)
24505			b.swapSuccessors()
24506			return true
24507		}
24508		// match: (GTnoov (InvertFlags cmp) yes no)
24509		// result: (LTnoov cmp yes no)
24510		for b.Controls[0].Op == OpARM64InvertFlags {
24511			v_0 := b.Controls[0]
24512			cmp := v_0.Args[0]
24513			b.resetWithControl(BlockARM64LTnoov, cmp)
24514			return true
24515		}
24516	case BlockIf:
24517		// match: (If (Equal cc) yes no)
24518		// result: (EQ cc yes no)
24519		for b.Controls[0].Op == OpARM64Equal {
24520			v_0 := b.Controls[0]
24521			cc := v_0.Args[0]
24522			b.resetWithControl(BlockARM64EQ, cc)
24523			return true
24524		}
24525		// match: (If (NotEqual cc) yes no)
24526		// result: (NE cc yes no)
24527		for b.Controls[0].Op == OpARM64NotEqual {
24528			v_0 := b.Controls[0]
24529			cc := v_0.Args[0]
24530			b.resetWithControl(BlockARM64NE, cc)
24531			return true
24532		}
24533		// match: (If (LessThan cc) yes no)
24534		// result: (LT cc yes no)
24535		for b.Controls[0].Op == OpARM64LessThan {
24536			v_0 := b.Controls[0]
24537			cc := v_0.Args[0]
24538			b.resetWithControl(BlockARM64LT, cc)
24539			return true
24540		}
24541		// match: (If (LessThanU cc) yes no)
24542		// result: (ULT cc yes no)
24543		for b.Controls[0].Op == OpARM64LessThanU {
24544			v_0 := b.Controls[0]
24545			cc := v_0.Args[0]
24546			b.resetWithControl(BlockARM64ULT, cc)
24547			return true
24548		}
24549		// match: (If (LessEqual cc) yes no)
24550		// result: (LE cc yes no)
24551		for b.Controls[0].Op == OpARM64LessEqual {
24552			v_0 := b.Controls[0]
24553			cc := v_0.Args[0]
24554			b.resetWithControl(BlockARM64LE, cc)
24555			return true
24556		}
24557		// match: (If (LessEqualU cc) yes no)
24558		// result: (ULE cc yes no)
24559		for b.Controls[0].Op == OpARM64LessEqualU {
24560			v_0 := b.Controls[0]
24561			cc := v_0.Args[0]
24562			b.resetWithControl(BlockARM64ULE, cc)
24563			return true
24564		}
24565		// match: (If (GreaterThan cc) yes no)
24566		// result: (GT cc yes no)
24567		for b.Controls[0].Op == OpARM64GreaterThan {
24568			v_0 := b.Controls[0]
24569			cc := v_0.Args[0]
24570			b.resetWithControl(BlockARM64GT, cc)
24571			return true
24572		}
24573		// match: (If (GreaterThanU cc) yes no)
24574		// result: (UGT cc yes no)
24575		for b.Controls[0].Op == OpARM64GreaterThanU {
24576			v_0 := b.Controls[0]
24577			cc := v_0.Args[0]
24578			b.resetWithControl(BlockARM64UGT, cc)
24579			return true
24580		}
24581		// match: (If (GreaterEqual cc) yes no)
24582		// result: (GE cc yes no)
24583		for b.Controls[0].Op == OpARM64GreaterEqual {
24584			v_0 := b.Controls[0]
24585			cc := v_0.Args[0]
24586			b.resetWithControl(BlockARM64GE, cc)
24587			return true
24588		}
24589		// match: (If (GreaterEqualU cc) yes no)
24590		// result: (UGE cc yes no)
24591		for b.Controls[0].Op == OpARM64GreaterEqualU {
24592			v_0 := b.Controls[0]
24593			cc := v_0.Args[0]
24594			b.resetWithControl(BlockARM64UGE, cc)
24595			return true
24596		}
24597		// match: (If (LessThanF cc) yes no)
24598		// result: (FLT cc yes no)
24599		for b.Controls[0].Op == OpARM64LessThanF {
24600			v_0 := b.Controls[0]
24601			cc := v_0.Args[0]
24602			b.resetWithControl(BlockARM64FLT, cc)
24603			return true
24604		}
24605		// match: (If (LessEqualF cc) yes no)
24606		// result: (FLE cc yes no)
24607		for b.Controls[0].Op == OpARM64LessEqualF {
24608			v_0 := b.Controls[0]
24609			cc := v_0.Args[0]
24610			b.resetWithControl(BlockARM64FLE, cc)
24611			return true
24612		}
24613		// match: (If (GreaterThanF cc) yes no)
24614		// result: (FGT cc yes no)
24615		for b.Controls[0].Op == OpARM64GreaterThanF {
24616			v_0 := b.Controls[0]
24617			cc := v_0.Args[0]
24618			b.resetWithControl(BlockARM64FGT, cc)
24619			return true
24620		}
24621		// match: (If (GreaterEqualF cc) yes no)
24622		// result: (FGE cc yes no)
24623		for b.Controls[0].Op == OpARM64GreaterEqualF {
24624			v_0 := b.Controls[0]
24625			cc := v_0.Args[0]
24626			b.resetWithControl(BlockARM64FGE, cc)
24627			return true
24628		}
24629		// match: (If cond yes no)
24630		// result: (TBNZ [0] cond yes no)
24631		for {
24632			cond := b.Controls[0]
24633			b.resetWithControl(BlockARM64TBNZ, cond)
24634			b.AuxInt = int64ToAuxInt(0)
24635			return true
24636		}
24637	case BlockJumpTable:
24638		// match: (JumpTable idx)
24639		// result: (JUMPTABLE {makeJumpTableSym(b)} idx (MOVDaddr <typ.Uintptr> {makeJumpTableSym(b)} (SB)))
24640		for {
24641			idx := b.Controls[0]
24642			v0 := b.NewValue0(b.Pos, OpARM64MOVDaddr, typ.Uintptr)
24643			v0.Aux = symToAux(makeJumpTableSym(b))
24644			v1 := b.NewValue0(b.Pos, OpSB, typ.Uintptr)
24645			v0.AddArg(v1)
24646			b.resetWithControl2(BlockARM64JUMPTABLE, idx, v0)
24647			b.Aux = symToAux(makeJumpTableSym(b))
24648			return true
24649		}
24650	case BlockARM64LE:
24651		// match: (LE (CMPconst [0] z:(AND x y)) yes no)
24652		// cond: z.Uses == 1
24653		// result: (LE (TST x y) yes no)
24654		for b.Controls[0].Op == OpARM64CMPconst {
24655			v_0 := b.Controls[0]
24656			if auxIntToInt64(v_0.AuxInt) != 0 {
24657				break
24658			}
24659			z := v_0.Args[0]
24660			if z.Op != OpARM64AND {
24661				break
24662			}
24663			_ = z.Args[1]
24664			z_0 := z.Args[0]
24665			z_1 := z.Args[1]
24666			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
24667				x := z_0
24668				y := z_1
24669				if !(z.Uses == 1) {
24670					continue
24671				}
24672				v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
24673				v0.AddArg2(x, y)
24674				b.resetWithControl(BlockARM64LE, v0)
24675				return true
24676			}
24677			break
24678		}
24679		// match: (LE (CMPconst [0] x:(ANDconst [c] y)) yes no)
24680		// cond: x.Uses == 1
24681		// result: (LE (TSTconst [c] y) yes no)
24682		for b.Controls[0].Op == OpARM64CMPconst {
24683			v_0 := b.Controls[0]
24684			if auxIntToInt64(v_0.AuxInt) != 0 {
24685				break
24686			}
24687			x := v_0.Args[0]
24688			if x.Op != OpARM64ANDconst {
24689				break
24690			}
24691			c := auxIntToInt64(x.AuxInt)
24692			y := x.Args[0]
24693			if !(x.Uses == 1) {
24694				break
24695			}
24696			v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
24697			v0.AuxInt = int64ToAuxInt(c)
24698			v0.AddArg(y)
24699			b.resetWithControl(BlockARM64LE, v0)
24700			return true
24701		}
24702		// match: (LE (CMPWconst [0] z:(AND x y)) yes no)
24703		// cond: z.Uses == 1
24704		// result: (LE (TSTW x y) yes no)
24705		for b.Controls[0].Op == OpARM64CMPWconst {
24706			v_0 := b.Controls[0]
24707			if auxIntToInt32(v_0.AuxInt) != 0 {
24708				break
24709			}
24710			z := v_0.Args[0]
24711			if z.Op != OpARM64AND {
24712				break
24713			}
24714			_ = z.Args[1]
24715			z_0 := z.Args[0]
24716			z_1 := z.Args[1]
24717			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
24718				x := z_0
24719				y := z_1
24720				if !(z.Uses == 1) {
24721					continue
24722				}
24723				v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
24724				v0.AddArg2(x, y)
24725				b.resetWithControl(BlockARM64LE, v0)
24726				return true
24727			}
24728			break
24729		}
24730		// match: (LE (CMPWconst [0] x:(ANDconst [c] y)) yes no)
24731		// cond: x.Uses == 1
24732		// result: (LE (TSTWconst [int32(c)] y) yes no)
24733		for b.Controls[0].Op == OpARM64CMPWconst {
24734			v_0 := b.Controls[0]
24735			if auxIntToInt32(v_0.AuxInt) != 0 {
24736				break
24737			}
24738			x := v_0.Args[0]
24739			if x.Op != OpARM64ANDconst {
24740				break
24741			}
24742			c := auxIntToInt64(x.AuxInt)
24743			y := x.Args[0]
24744			if !(x.Uses == 1) {
24745				break
24746			}
24747			v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
24748			v0.AuxInt = int32ToAuxInt(int32(c))
24749			v0.AddArg(y)
24750			b.resetWithControl(BlockARM64LE, v0)
24751			return true
24752		}
24753		// match: (LE (CMPconst [0] x:(ADDconst [c] y)) yes no)
24754		// cond: x.Uses == 1
24755		// result: (LEnoov (CMNconst [c] y) yes no)
24756		for b.Controls[0].Op == OpARM64CMPconst {
24757			v_0 := b.Controls[0]
24758			if auxIntToInt64(v_0.AuxInt) != 0 {
24759				break
24760			}
24761			x := v_0.Args[0]
24762			if x.Op != OpARM64ADDconst {
24763				break
24764			}
24765			c := auxIntToInt64(x.AuxInt)
24766			y := x.Args[0]
24767			if !(x.Uses == 1) {
24768				break
24769			}
24770			v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
24771			v0.AuxInt = int64ToAuxInt(c)
24772			v0.AddArg(y)
24773			b.resetWithControl(BlockARM64LEnoov, v0)
24774			return true
24775		}
24776		// match: (LE (CMPWconst [0] x:(ADDconst [c] y)) yes no)
24777		// cond: x.Uses == 1
24778		// result: (LEnoov (CMNWconst [int32(c)] y) yes no)
24779		for b.Controls[0].Op == OpARM64CMPWconst {
24780			v_0 := b.Controls[0]
24781			if auxIntToInt32(v_0.AuxInt) != 0 {
24782				break
24783			}
24784			x := v_0.Args[0]
24785			if x.Op != OpARM64ADDconst {
24786				break
24787			}
24788			c := auxIntToInt64(x.AuxInt)
24789			y := x.Args[0]
24790			if !(x.Uses == 1) {
24791				break
24792			}
24793			v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
24794			v0.AuxInt = int32ToAuxInt(int32(c))
24795			v0.AddArg(y)
24796			b.resetWithControl(BlockARM64LEnoov, v0)
24797			return true
24798		}
24799		// match: (LE (CMPconst [0] z:(ADD x y)) yes no)
24800		// cond: z.Uses == 1
24801		// result: (LEnoov (CMN x y) yes no)
24802		for b.Controls[0].Op == OpARM64CMPconst {
24803			v_0 := b.Controls[0]
24804			if auxIntToInt64(v_0.AuxInt) != 0 {
24805				break
24806			}
24807			z := v_0.Args[0]
24808			if z.Op != OpARM64ADD {
24809				break
24810			}
24811			_ = z.Args[1]
24812			z_0 := z.Args[0]
24813			z_1 := z.Args[1]
24814			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
24815				x := z_0
24816				y := z_1
24817				if !(z.Uses == 1) {
24818					continue
24819				}
24820				v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
24821				v0.AddArg2(x, y)
24822				b.resetWithControl(BlockARM64LEnoov, v0)
24823				return true
24824			}
24825			break
24826		}
24827		// match: (LE (CMPWconst [0] z:(ADD x y)) yes no)
24828		// cond: z.Uses == 1
24829		// result: (LEnoov (CMNW x y) yes no)
24830		for b.Controls[0].Op == OpARM64CMPWconst {
24831			v_0 := b.Controls[0]
24832			if auxIntToInt32(v_0.AuxInt) != 0 {
24833				break
24834			}
24835			z := v_0.Args[0]
24836			if z.Op != OpARM64ADD {
24837				break
24838			}
24839			_ = z.Args[1]
24840			z_0 := z.Args[0]
24841			z_1 := z.Args[1]
24842			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
24843				x := z_0
24844				y := z_1
24845				if !(z.Uses == 1) {
24846					continue
24847				}
24848				v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
24849				v0.AddArg2(x, y)
24850				b.resetWithControl(BlockARM64LEnoov, v0)
24851				return true
24852			}
24853			break
24854		}
24855		// match: (LE (CMPconst [0] z:(MADD a x y)) yes no)
24856		// cond: z.Uses==1
24857		// result: (LEnoov (CMN a (MUL <x.Type> x y)) yes no)
24858		for b.Controls[0].Op == OpARM64CMPconst {
24859			v_0 := b.Controls[0]
24860			if auxIntToInt64(v_0.AuxInt) != 0 {
24861				break
24862			}
24863			z := v_0.Args[0]
24864			if z.Op != OpARM64MADD {
24865				break
24866			}
24867			y := z.Args[2]
24868			a := z.Args[0]
24869			x := z.Args[1]
24870			if !(z.Uses == 1) {
24871				break
24872			}
24873			v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
24874			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
24875			v1.AddArg2(x, y)
24876			v0.AddArg2(a, v1)
24877			b.resetWithControl(BlockARM64LEnoov, v0)
24878			return true
24879		}
24880		// match: (LE (CMPconst [0] z:(MSUB a x y)) yes no)
24881		// cond: z.Uses==1
24882		// result: (LEnoov (CMP a (MUL <x.Type> x y)) yes no)
24883		for b.Controls[0].Op == OpARM64CMPconst {
24884			v_0 := b.Controls[0]
24885			if auxIntToInt64(v_0.AuxInt) != 0 {
24886				break
24887			}
24888			z := v_0.Args[0]
24889			if z.Op != OpARM64MSUB {
24890				break
24891			}
24892			y := z.Args[2]
24893			a := z.Args[0]
24894			x := z.Args[1]
24895			if !(z.Uses == 1) {
24896				break
24897			}
24898			v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
24899			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
24900			v1.AddArg2(x, y)
24901			v0.AddArg2(a, v1)
24902			b.resetWithControl(BlockARM64LEnoov, v0)
24903			return true
24904		}
24905		// match: (LE (CMPWconst [0] z:(MADDW a x y)) yes no)
24906		// cond: z.Uses==1
24907		// result: (LEnoov (CMNW a (MULW <x.Type> x y)) yes no)
24908		for b.Controls[0].Op == OpARM64CMPWconst {
24909			v_0 := b.Controls[0]
24910			if auxIntToInt32(v_0.AuxInt) != 0 {
24911				break
24912			}
24913			z := v_0.Args[0]
24914			if z.Op != OpARM64MADDW {
24915				break
24916			}
24917			y := z.Args[2]
24918			a := z.Args[0]
24919			x := z.Args[1]
24920			if !(z.Uses == 1) {
24921				break
24922			}
24923			v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
24924			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
24925			v1.AddArg2(x, y)
24926			v0.AddArg2(a, v1)
24927			b.resetWithControl(BlockARM64LEnoov, v0)
24928			return true
24929		}
24930		// match: (LE (CMPWconst [0] z:(MSUBW a x y)) yes no)
24931		// cond: z.Uses==1
24932		// result: (LEnoov (CMPW a (MULW <x.Type> x y)) yes no)
24933		for b.Controls[0].Op == OpARM64CMPWconst {
24934			v_0 := b.Controls[0]
24935			if auxIntToInt32(v_0.AuxInt) != 0 {
24936				break
24937			}
24938			z := v_0.Args[0]
24939			if z.Op != OpARM64MSUBW {
24940				break
24941			}
24942			y := z.Args[2]
24943			a := z.Args[0]
24944			x := z.Args[1]
24945			if !(z.Uses == 1) {
24946				break
24947			}
24948			v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
24949			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
24950			v1.AddArg2(x, y)
24951			v0.AddArg2(a, v1)
24952			b.resetWithControl(BlockARM64LEnoov, v0)
24953			return true
24954		}
24955		// match: (LE (FlagConstant [fc]) yes no)
24956		// cond: fc.le()
24957		// result: (First yes no)
24958		for b.Controls[0].Op == OpARM64FlagConstant {
24959			v_0 := b.Controls[0]
24960			fc := auxIntToFlagConstant(v_0.AuxInt)
24961			if !(fc.le()) {
24962				break
24963			}
24964			b.Reset(BlockFirst)
24965			return true
24966		}
24967		// match: (LE (FlagConstant [fc]) yes no)
24968		// cond: !fc.le()
24969		// result: (First no yes)
24970		for b.Controls[0].Op == OpARM64FlagConstant {
24971			v_0 := b.Controls[0]
24972			fc := auxIntToFlagConstant(v_0.AuxInt)
24973			if !(!fc.le()) {
24974				break
24975			}
24976			b.Reset(BlockFirst)
24977			b.swapSuccessors()
24978			return true
24979		}
24980		// match: (LE (InvertFlags cmp) yes no)
24981		// result: (GE cmp yes no)
24982		for b.Controls[0].Op == OpARM64InvertFlags {
24983			v_0 := b.Controls[0]
24984			cmp := v_0.Args[0]
24985			b.resetWithControl(BlockARM64GE, cmp)
24986			return true
24987		}
24988	case BlockARM64LEnoov:
24989		// match: (LEnoov (FlagConstant [fc]) yes no)
24990		// cond: fc.leNoov()
24991		// result: (First yes no)
24992		for b.Controls[0].Op == OpARM64FlagConstant {
24993			v_0 := b.Controls[0]
24994			fc := auxIntToFlagConstant(v_0.AuxInt)
24995			if !(fc.leNoov()) {
24996				break
24997			}
24998			b.Reset(BlockFirst)
24999			return true
25000		}
25001		// match: (LEnoov (FlagConstant [fc]) yes no)
25002		// cond: !fc.leNoov()
25003		// result: (First no yes)
25004		for b.Controls[0].Op == OpARM64FlagConstant {
25005			v_0 := b.Controls[0]
25006			fc := auxIntToFlagConstant(v_0.AuxInt)
25007			if !(!fc.leNoov()) {
25008				break
25009			}
25010			b.Reset(BlockFirst)
25011			b.swapSuccessors()
25012			return true
25013		}
25014		// match: (LEnoov (InvertFlags cmp) yes no)
25015		// result: (GEnoov cmp yes no)
25016		for b.Controls[0].Op == OpARM64InvertFlags {
25017			v_0 := b.Controls[0]
25018			cmp := v_0.Args[0]
25019			b.resetWithControl(BlockARM64GEnoov, cmp)
25020			return true
25021		}
25022	case BlockARM64LT:
25023		// match: (LT (CMPconst [0] z:(AND x y)) yes no)
25024		// cond: z.Uses == 1
25025		// result: (LT (TST x y) yes no)
25026		for b.Controls[0].Op == OpARM64CMPconst {
25027			v_0 := b.Controls[0]
25028			if auxIntToInt64(v_0.AuxInt) != 0 {
25029				break
25030			}
25031			z := v_0.Args[0]
25032			if z.Op != OpARM64AND {
25033				break
25034			}
25035			_ = z.Args[1]
25036			z_0 := z.Args[0]
25037			z_1 := z.Args[1]
25038			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
25039				x := z_0
25040				y := z_1
25041				if !(z.Uses == 1) {
25042					continue
25043				}
25044				v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
25045				v0.AddArg2(x, y)
25046				b.resetWithControl(BlockARM64LT, v0)
25047				return true
25048			}
25049			break
25050		}
25051		// match: (LT (CMPconst [0] x:(ANDconst [c] y)) yes no)
25052		// cond: x.Uses == 1
25053		// result: (LT (TSTconst [c] y) yes no)
25054		for b.Controls[0].Op == OpARM64CMPconst {
25055			v_0 := b.Controls[0]
25056			if auxIntToInt64(v_0.AuxInt) != 0 {
25057				break
25058			}
25059			x := v_0.Args[0]
25060			if x.Op != OpARM64ANDconst {
25061				break
25062			}
25063			c := auxIntToInt64(x.AuxInt)
25064			y := x.Args[0]
25065			if !(x.Uses == 1) {
25066				break
25067			}
25068			v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
25069			v0.AuxInt = int64ToAuxInt(c)
25070			v0.AddArg(y)
25071			b.resetWithControl(BlockARM64LT, v0)
25072			return true
25073		}
25074		// match: (LT (CMPWconst [0] z:(AND x y)) yes no)
25075		// cond: z.Uses == 1
25076		// result: (LT (TSTW x y) yes no)
25077		for b.Controls[0].Op == OpARM64CMPWconst {
25078			v_0 := b.Controls[0]
25079			if auxIntToInt32(v_0.AuxInt) != 0 {
25080				break
25081			}
25082			z := v_0.Args[0]
25083			if z.Op != OpARM64AND {
25084				break
25085			}
25086			_ = z.Args[1]
25087			z_0 := z.Args[0]
25088			z_1 := z.Args[1]
25089			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
25090				x := z_0
25091				y := z_1
25092				if !(z.Uses == 1) {
25093					continue
25094				}
25095				v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
25096				v0.AddArg2(x, y)
25097				b.resetWithControl(BlockARM64LT, v0)
25098				return true
25099			}
25100			break
25101		}
25102		// match: (LT (CMPWconst [0] x:(ANDconst [c] y)) yes no)
25103		// cond: x.Uses == 1
25104		// result: (LT (TSTWconst [int32(c)] y) yes no)
25105		for b.Controls[0].Op == OpARM64CMPWconst {
25106			v_0 := b.Controls[0]
25107			if auxIntToInt32(v_0.AuxInt) != 0 {
25108				break
25109			}
25110			x := v_0.Args[0]
25111			if x.Op != OpARM64ANDconst {
25112				break
25113			}
25114			c := auxIntToInt64(x.AuxInt)
25115			y := x.Args[0]
25116			if !(x.Uses == 1) {
25117				break
25118			}
25119			v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
25120			v0.AuxInt = int32ToAuxInt(int32(c))
25121			v0.AddArg(y)
25122			b.resetWithControl(BlockARM64LT, v0)
25123			return true
25124		}
25125		// match: (LT (CMPconst [0] x:(ADDconst [c] y)) yes no)
25126		// cond: x.Uses == 1
25127		// result: (LTnoov (CMNconst [c] y) yes no)
25128		for b.Controls[0].Op == OpARM64CMPconst {
25129			v_0 := b.Controls[0]
25130			if auxIntToInt64(v_0.AuxInt) != 0 {
25131				break
25132			}
25133			x := v_0.Args[0]
25134			if x.Op != OpARM64ADDconst {
25135				break
25136			}
25137			c := auxIntToInt64(x.AuxInt)
25138			y := x.Args[0]
25139			if !(x.Uses == 1) {
25140				break
25141			}
25142			v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
25143			v0.AuxInt = int64ToAuxInt(c)
25144			v0.AddArg(y)
25145			b.resetWithControl(BlockARM64LTnoov, v0)
25146			return true
25147		}
25148		// match: (LT (CMPWconst [0] x:(ADDconst [c] y)) yes no)
25149		// cond: x.Uses == 1
25150		// result: (LTnoov (CMNWconst [int32(c)] y) yes no)
25151		for b.Controls[0].Op == OpARM64CMPWconst {
25152			v_0 := b.Controls[0]
25153			if auxIntToInt32(v_0.AuxInt) != 0 {
25154				break
25155			}
25156			x := v_0.Args[0]
25157			if x.Op != OpARM64ADDconst {
25158				break
25159			}
25160			c := auxIntToInt64(x.AuxInt)
25161			y := x.Args[0]
25162			if !(x.Uses == 1) {
25163				break
25164			}
25165			v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
25166			v0.AuxInt = int32ToAuxInt(int32(c))
25167			v0.AddArg(y)
25168			b.resetWithControl(BlockARM64LTnoov, v0)
25169			return true
25170		}
25171		// match: (LT (CMPconst [0] z:(ADD x y)) yes no)
25172		// cond: z.Uses == 1
25173		// result: (LTnoov (CMN x y) yes no)
25174		for b.Controls[0].Op == OpARM64CMPconst {
25175			v_0 := b.Controls[0]
25176			if auxIntToInt64(v_0.AuxInt) != 0 {
25177				break
25178			}
25179			z := v_0.Args[0]
25180			if z.Op != OpARM64ADD {
25181				break
25182			}
25183			_ = z.Args[1]
25184			z_0 := z.Args[0]
25185			z_1 := z.Args[1]
25186			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
25187				x := z_0
25188				y := z_1
25189				if !(z.Uses == 1) {
25190					continue
25191				}
25192				v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
25193				v0.AddArg2(x, y)
25194				b.resetWithControl(BlockARM64LTnoov, v0)
25195				return true
25196			}
25197			break
25198		}
25199		// match: (LT (CMPWconst [0] z:(ADD x y)) yes no)
25200		// cond: z.Uses == 1
25201		// result: (LTnoov (CMNW x y) yes no)
25202		for b.Controls[0].Op == OpARM64CMPWconst {
25203			v_0 := b.Controls[0]
25204			if auxIntToInt32(v_0.AuxInt) != 0 {
25205				break
25206			}
25207			z := v_0.Args[0]
25208			if z.Op != OpARM64ADD {
25209				break
25210			}
25211			_ = z.Args[1]
25212			z_0 := z.Args[0]
25213			z_1 := z.Args[1]
25214			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
25215				x := z_0
25216				y := z_1
25217				if !(z.Uses == 1) {
25218					continue
25219				}
25220				v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
25221				v0.AddArg2(x, y)
25222				b.resetWithControl(BlockARM64LTnoov, v0)
25223				return true
25224			}
25225			break
25226		}
25227		// match: (LT (CMPconst [0] z:(MADD a x y)) yes no)
25228		// cond: z.Uses==1
25229		// result: (LTnoov (CMN a (MUL <x.Type> x y)) yes no)
25230		for b.Controls[0].Op == OpARM64CMPconst {
25231			v_0 := b.Controls[0]
25232			if auxIntToInt64(v_0.AuxInt) != 0 {
25233				break
25234			}
25235			z := v_0.Args[0]
25236			if z.Op != OpARM64MADD {
25237				break
25238			}
25239			y := z.Args[2]
25240			a := z.Args[0]
25241			x := z.Args[1]
25242			if !(z.Uses == 1) {
25243				break
25244			}
25245			v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
25246			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
25247			v1.AddArg2(x, y)
25248			v0.AddArg2(a, v1)
25249			b.resetWithControl(BlockARM64LTnoov, v0)
25250			return true
25251		}
25252		// match: (LT (CMPconst [0] z:(MSUB a x y)) yes no)
25253		// cond: z.Uses==1
25254		// result: (LTnoov (CMP a (MUL <x.Type> x y)) yes no)
25255		for b.Controls[0].Op == OpARM64CMPconst {
25256			v_0 := b.Controls[0]
25257			if auxIntToInt64(v_0.AuxInt) != 0 {
25258				break
25259			}
25260			z := v_0.Args[0]
25261			if z.Op != OpARM64MSUB {
25262				break
25263			}
25264			y := z.Args[2]
25265			a := z.Args[0]
25266			x := z.Args[1]
25267			if !(z.Uses == 1) {
25268				break
25269			}
25270			v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
25271			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
25272			v1.AddArg2(x, y)
25273			v0.AddArg2(a, v1)
25274			b.resetWithControl(BlockARM64LTnoov, v0)
25275			return true
25276		}
25277		// match: (LT (CMPWconst [0] z:(MADDW a x y)) yes no)
25278		// cond: z.Uses==1
25279		// result: (LTnoov (CMNW a (MULW <x.Type> x y)) yes no)
25280		for b.Controls[0].Op == OpARM64CMPWconst {
25281			v_0 := b.Controls[0]
25282			if auxIntToInt32(v_0.AuxInt) != 0 {
25283				break
25284			}
25285			z := v_0.Args[0]
25286			if z.Op != OpARM64MADDW {
25287				break
25288			}
25289			y := z.Args[2]
25290			a := z.Args[0]
25291			x := z.Args[1]
25292			if !(z.Uses == 1) {
25293				break
25294			}
25295			v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
25296			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
25297			v1.AddArg2(x, y)
25298			v0.AddArg2(a, v1)
25299			b.resetWithControl(BlockARM64LTnoov, v0)
25300			return true
25301		}
25302		// match: (LT (CMPWconst [0] z:(MSUBW a x y)) yes no)
25303		// cond: z.Uses==1
25304		// result: (LTnoov (CMPW a (MULW <x.Type> x y)) yes no)
25305		for b.Controls[0].Op == OpARM64CMPWconst {
25306			v_0 := b.Controls[0]
25307			if auxIntToInt32(v_0.AuxInt) != 0 {
25308				break
25309			}
25310			z := v_0.Args[0]
25311			if z.Op != OpARM64MSUBW {
25312				break
25313			}
25314			y := z.Args[2]
25315			a := z.Args[0]
25316			x := z.Args[1]
25317			if !(z.Uses == 1) {
25318				break
25319			}
25320			v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
25321			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
25322			v1.AddArg2(x, y)
25323			v0.AddArg2(a, v1)
25324			b.resetWithControl(BlockARM64LTnoov, v0)
25325			return true
25326		}
25327		// match: (LT (CMPWconst [0] x) yes no)
25328		// result: (TBNZ [31] x yes no)
25329		for b.Controls[0].Op == OpARM64CMPWconst {
25330			v_0 := b.Controls[0]
25331			if auxIntToInt32(v_0.AuxInt) != 0 {
25332				break
25333			}
25334			x := v_0.Args[0]
25335			b.resetWithControl(BlockARM64TBNZ, x)
25336			b.AuxInt = int64ToAuxInt(31)
25337			return true
25338		}
25339		// match: (LT (CMPconst [0] x) yes no)
25340		// result: (TBNZ [63] x yes no)
25341		for b.Controls[0].Op == OpARM64CMPconst {
25342			v_0 := b.Controls[0]
25343			if auxIntToInt64(v_0.AuxInt) != 0 {
25344				break
25345			}
25346			x := v_0.Args[0]
25347			b.resetWithControl(BlockARM64TBNZ, x)
25348			b.AuxInt = int64ToAuxInt(63)
25349			return true
25350		}
25351		// match: (LT (FlagConstant [fc]) yes no)
25352		// cond: fc.lt()
25353		// result: (First yes no)
25354		for b.Controls[0].Op == OpARM64FlagConstant {
25355			v_0 := b.Controls[0]
25356			fc := auxIntToFlagConstant(v_0.AuxInt)
25357			if !(fc.lt()) {
25358				break
25359			}
25360			b.Reset(BlockFirst)
25361			return true
25362		}
25363		// match: (LT (FlagConstant [fc]) yes no)
25364		// cond: !fc.lt()
25365		// result: (First no yes)
25366		for b.Controls[0].Op == OpARM64FlagConstant {
25367			v_0 := b.Controls[0]
25368			fc := auxIntToFlagConstant(v_0.AuxInt)
25369			if !(!fc.lt()) {
25370				break
25371			}
25372			b.Reset(BlockFirst)
25373			b.swapSuccessors()
25374			return true
25375		}
25376		// match: (LT (InvertFlags cmp) yes no)
25377		// result: (GT cmp yes no)
25378		for b.Controls[0].Op == OpARM64InvertFlags {
25379			v_0 := b.Controls[0]
25380			cmp := v_0.Args[0]
25381			b.resetWithControl(BlockARM64GT, cmp)
25382			return true
25383		}
25384	case BlockARM64LTnoov:
25385		// match: (LTnoov (FlagConstant [fc]) yes no)
25386		// cond: fc.ltNoov()
25387		// result: (First yes no)
25388		for b.Controls[0].Op == OpARM64FlagConstant {
25389			v_0 := b.Controls[0]
25390			fc := auxIntToFlagConstant(v_0.AuxInt)
25391			if !(fc.ltNoov()) {
25392				break
25393			}
25394			b.Reset(BlockFirst)
25395			return true
25396		}
25397		// match: (LTnoov (FlagConstant [fc]) yes no)
25398		// cond: !fc.ltNoov()
25399		// result: (First no yes)
25400		for b.Controls[0].Op == OpARM64FlagConstant {
25401			v_0 := b.Controls[0]
25402			fc := auxIntToFlagConstant(v_0.AuxInt)
25403			if !(!fc.ltNoov()) {
25404				break
25405			}
25406			b.Reset(BlockFirst)
25407			b.swapSuccessors()
25408			return true
25409		}
25410		// match: (LTnoov (InvertFlags cmp) yes no)
25411		// result: (GTnoov cmp yes no)
25412		for b.Controls[0].Op == OpARM64InvertFlags {
25413			v_0 := b.Controls[0]
25414			cmp := v_0.Args[0]
25415			b.resetWithControl(BlockARM64GTnoov, cmp)
25416			return true
25417		}
25418	case BlockARM64NE:
25419		// match: (NE (CMPconst [0] z:(AND x y)) yes no)
25420		// cond: z.Uses == 1
25421		// result: (NE (TST x y) yes no)
25422		for b.Controls[0].Op == OpARM64CMPconst {
25423			v_0 := b.Controls[0]
25424			if auxIntToInt64(v_0.AuxInt) != 0 {
25425				break
25426			}
25427			z := v_0.Args[0]
25428			if z.Op != OpARM64AND {
25429				break
25430			}
25431			_ = z.Args[1]
25432			z_0 := z.Args[0]
25433			z_1 := z.Args[1]
25434			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
25435				x := z_0
25436				y := z_1
25437				if !(z.Uses == 1) {
25438					continue
25439				}
25440				v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
25441				v0.AddArg2(x, y)
25442				b.resetWithControl(BlockARM64NE, v0)
25443				return true
25444			}
25445			break
25446		}
25447		// match: (NE (CMPconst [0] x:(ANDconst [c] y)) yes no)
25448		// cond: x.Uses == 1
25449		// result: (NE (TSTconst [c] y) yes no)
25450		for b.Controls[0].Op == OpARM64CMPconst {
25451			v_0 := b.Controls[0]
25452			if auxIntToInt64(v_0.AuxInt) != 0 {
25453				break
25454			}
25455			x := v_0.Args[0]
25456			if x.Op != OpARM64ANDconst {
25457				break
25458			}
25459			c := auxIntToInt64(x.AuxInt)
25460			y := x.Args[0]
25461			if !(x.Uses == 1) {
25462				break
25463			}
25464			v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
25465			v0.AuxInt = int64ToAuxInt(c)
25466			v0.AddArg(y)
25467			b.resetWithControl(BlockARM64NE, v0)
25468			return true
25469		}
25470		// match: (NE (CMPWconst [0] z:(AND x y)) yes no)
25471		// cond: z.Uses == 1
25472		// result: (NE (TSTW x y) yes no)
25473		for b.Controls[0].Op == OpARM64CMPWconst {
25474			v_0 := b.Controls[0]
25475			if auxIntToInt32(v_0.AuxInt) != 0 {
25476				break
25477			}
25478			z := v_0.Args[0]
25479			if z.Op != OpARM64AND {
25480				break
25481			}
25482			_ = z.Args[1]
25483			z_0 := z.Args[0]
25484			z_1 := z.Args[1]
25485			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
25486				x := z_0
25487				y := z_1
25488				if !(z.Uses == 1) {
25489					continue
25490				}
25491				v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
25492				v0.AddArg2(x, y)
25493				b.resetWithControl(BlockARM64NE, v0)
25494				return true
25495			}
25496			break
25497		}
25498		// match: (NE (CMPWconst [0] x:(ANDconst [c] y)) yes no)
25499		// cond: x.Uses == 1
25500		// result: (NE (TSTWconst [int32(c)] y) yes no)
25501		for b.Controls[0].Op == OpARM64CMPWconst {
25502			v_0 := b.Controls[0]
25503			if auxIntToInt32(v_0.AuxInt) != 0 {
25504				break
25505			}
25506			x := v_0.Args[0]
25507			if x.Op != OpARM64ANDconst {
25508				break
25509			}
25510			c := auxIntToInt64(x.AuxInt)
25511			y := x.Args[0]
25512			if !(x.Uses == 1) {
25513				break
25514			}
25515			v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
25516			v0.AuxInt = int32ToAuxInt(int32(c))
25517			v0.AddArg(y)
25518			b.resetWithControl(BlockARM64NE, v0)
25519			return true
25520		}
25521		// match: (NE (CMPconst [0] x:(ADDconst [c] y)) yes no)
25522		// cond: x.Uses == 1
25523		// result: (NE (CMNconst [c] y) yes no)
25524		for b.Controls[0].Op == OpARM64CMPconst {
25525			v_0 := b.Controls[0]
25526			if auxIntToInt64(v_0.AuxInt) != 0 {
25527				break
25528			}
25529			x := v_0.Args[0]
25530			if x.Op != OpARM64ADDconst {
25531				break
25532			}
25533			c := auxIntToInt64(x.AuxInt)
25534			y := x.Args[0]
25535			if !(x.Uses == 1) {
25536				break
25537			}
25538			v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
25539			v0.AuxInt = int64ToAuxInt(c)
25540			v0.AddArg(y)
25541			b.resetWithControl(BlockARM64NE, v0)
25542			return true
25543		}
25544		// match: (NE (CMPWconst [0] x:(ADDconst [c] y)) yes no)
25545		// cond: x.Uses == 1
25546		// result: (NE (CMNWconst [int32(c)] y) yes no)
25547		for b.Controls[0].Op == OpARM64CMPWconst {
25548			v_0 := b.Controls[0]
25549			if auxIntToInt32(v_0.AuxInt) != 0 {
25550				break
25551			}
25552			x := v_0.Args[0]
25553			if x.Op != OpARM64ADDconst {
25554				break
25555			}
25556			c := auxIntToInt64(x.AuxInt)
25557			y := x.Args[0]
25558			if !(x.Uses == 1) {
25559				break
25560			}
25561			v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
25562			v0.AuxInt = int32ToAuxInt(int32(c))
25563			v0.AddArg(y)
25564			b.resetWithControl(BlockARM64NE, v0)
25565			return true
25566		}
25567		// match: (NE (CMPconst [0] z:(ADD x y)) yes no)
25568		// cond: z.Uses == 1
25569		// result: (NE (CMN x y) yes no)
25570		for b.Controls[0].Op == OpARM64CMPconst {
25571			v_0 := b.Controls[0]
25572			if auxIntToInt64(v_0.AuxInt) != 0 {
25573				break
25574			}
25575			z := v_0.Args[0]
25576			if z.Op != OpARM64ADD {
25577				break
25578			}
25579			_ = z.Args[1]
25580			z_0 := z.Args[0]
25581			z_1 := z.Args[1]
25582			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
25583				x := z_0
25584				y := z_1
25585				if !(z.Uses == 1) {
25586					continue
25587				}
25588				v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
25589				v0.AddArg2(x, y)
25590				b.resetWithControl(BlockARM64NE, v0)
25591				return true
25592			}
25593			break
25594		}
25595		// match: (NE (CMPWconst [0] z:(ADD x y)) yes no)
25596		// cond: z.Uses == 1
25597		// result: (NE (CMNW x y) yes no)
25598		for b.Controls[0].Op == OpARM64CMPWconst {
25599			v_0 := b.Controls[0]
25600			if auxIntToInt32(v_0.AuxInt) != 0 {
25601				break
25602			}
25603			z := v_0.Args[0]
25604			if z.Op != OpARM64ADD {
25605				break
25606			}
25607			_ = z.Args[1]
25608			z_0 := z.Args[0]
25609			z_1 := z.Args[1]
25610			for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
25611				x := z_0
25612				y := z_1
25613				if !(z.Uses == 1) {
25614					continue
25615				}
25616				v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
25617				v0.AddArg2(x, y)
25618				b.resetWithControl(BlockARM64NE, v0)
25619				return true
25620			}
25621			break
25622		}
25623		// match: (NE (CMP x z:(NEG y)) yes no)
25624		// cond: z.Uses == 1
25625		// result: (NE (CMN x y) yes no)
25626		for b.Controls[0].Op == OpARM64CMP {
25627			v_0 := b.Controls[0]
25628			_ = v_0.Args[1]
25629			x := v_0.Args[0]
25630			z := v_0.Args[1]
25631			if z.Op != OpARM64NEG {
25632				break
25633			}
25634			y := z.Args[0]
25635			if !(z.Uses == 1) {
25636				break
25637			}
25638			v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
25639			v0.AddArg2(x, y)
25640			b.resetWithControl(BlockARM64NE, v0)
25641			return true
25642		}
25643		// match: (NE (CMPW x z:(NEG y)) yes no)
25644		// cond: z.Uses == 1
25645		// result: (NE (CMNW x y) yes no)
25646		for b.Controls[0].Op == OpARM64CMPW {
25647			v_0 := b.Controls[0]
25648			_ = v_0.Args[1]
25649			x := v_0.Args[0]
25650			z := v_0.Args[1]
25651			if z.Op != OpARM64NEG {
25652				break
25653			}
25654			y := z.Args[0]
25655			if !(z.Uses == 1) {
25656				break
25657			}
25658			v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
25659			v0.AddArg2(x, y)
25660			b.resetWithControl(BlockARM64NE, v0)
25661			return true
25662		}
25663		// match: (NE (CMPconst [0] x) yes no)
25664		// result: (NZ x yes no)
25665		for b.Controls[0].Op == OpARM64CMPconst {
25666			v_0 := b.Controls[0]
25667			if auxIntToInt64(v_0.AuxInt) != 0 {
25668				break
25669			}
25670			x := v_0.Args[0]
25671			b.resetWithControl(BlockARM64NZ, x)
25672			return true
25673		}
25674		// match: (NE (CMPWconst [0] x) yes no)
25675		// result: (NZW x yes no)
25676		for b.Controls[0].Op == OpARM64CMPWconst {
25677			v_0 := b.Controls[0]
25678			if auxIntToInt32(v_0.AuxInt) != 0 {
25679				break
25680			}
25681			x := v_0.Args[0]
25682			b.resetWithControl(BlockARM64NZW, x)
25683			return true
25684		}
25685		// match: (NE (CMPconst [0] z:(MADD a x y)) yes no)
25686		// cond: z.Uses==1
25687		// result: (NE (CMN a (MUL <x.Type> x y)) yes no)
25688		for b.Controls[0].Op == OpARM64CMPconst {
25689			v_0 := b.Controls[0]
25690			if auxIntToInt64(v_0.AuxInt) != 0 {
25691				break
25692			}
25693			z := v_0.Args[0]
25694			if z.Op != OpARM64MADD {
25695				break
25696			}
25697			y := z.Args[2]
25698			a := z.Args[0]
25699			x := z.Args[1]
25700			if !(z.Uses == 1) {
25701				break
25702			}
25703			v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
25704			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
25705			v1.AddArg2(x, y)
25706			v0.AddArg2(a, v1)
25707			b.resetWithControl(BlockARM64NE, v0)
25708			return true
25709		}
25710		// match: (NE (CMPconst [0] z:(MSUB a x y)) yes no)
25711		// cond: z.Uses==1
25712		// result: (NE (CMP a (MUL <x.Type> x y)) yes no)
25713		for b.Controls[0].Op == OpARM64CMPconst {
25714			v_0 := b.Controls[0]
25715			if auxIntToInt64(v_0.AuxInt) != 0 {
25716				break
25717			}
25718			z := v_0.Args[0]
25719			if z.Op != OpARM64MSUB {
25720				break
25721			}
25722			y := z.Args[2]
25723			a := z.Args[0]
25724			x := z.Args[1]
25725			if !(z.Uses == 1) {
25726				break
25727			}
25728			v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
25729			v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
25730			v1.AddArg2(x, y)
25731			v0.AddArg2(a, v1)
25732			b.resetWithControl(BlockARM64NE, v0)
25733			return true
25734		}
25735		// match: (NE (CMPWconst [0] z:(MADDW a x y)) yes no)
25736		// cond: z.Uses==1
25737		// result: (NE (CMNW a (MULW <x.Type> x y)) yes no)
25738		for b.Controls[0].Op == OpARM64CMPWconst {
25739			v_0 := b.Controls[0]
25740			if auxIntToInt32(v_0.AuxInt) != 0 {
25741				break
25742			}
25743			z := v_0.Args[0]
25744			if z.Op != OpARM64MADDW {
25745				break
25746			}
25747			y := z.Args[2]
25748			a := z.Args[0]
25749			x := z.Args[1]
25750			if !(z.Uses == 1) {
25751				break
25752			}
25753			v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
25754			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
25755			v1.AddArg2(x, y)
25756			v0.AddArg2(a, v1)
25757			b.resetWithControl(BlockARM64NE, v0)
25758			return true
25759		}
25760		// match: (NE (CMPWconst [0] z:(MSUBW a x y)) yes no)
25761		// cond: z.Uses==1
25762		// result: (NE (CMPW a (MULW <x.Type> x y)) yes no)
25763		for b.Controls[0].Op == OpARM64CMPWconst {
25764			v_0 := b.Controls[0]
25765			if auxIntToInt32(v_0.AuxInt) != 0 {
25766				break
25767			}
25768			z := v_0.Args[0]
25769			if z.Op != OpARM64MSUBW {
25770				break
25771			}
25772			y := z.Args[2]
25773			a := z.Args[0]
25774			x := z.Args[1]
25775			if !(z.Uses == 1) {
25776				break
25777			}
25778			v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
25779			v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
25780			v1.AddArg2(x, y)
25781			v0.AddArg2(a, v1)
25782			b.resetWithControl(BlockARM64NE, v0)
25783			return true
25784		}
25785		// match: (NE (TSTconst [c] x) yes no)
25786		// cond: oneBit(c)
25787		// result: (TBNZ [int64(ntz64(c))] x yes no)
25788		for b.Controls[0].Op == OpARM64TSTconst {
25789			v_0 := b.Controls[0]
25790			c := auxIntToInt64(v_0.AuxInt)
25791			x := v_0.Args[0]
25792			if !(oneBit(c)) {
25793				break
25794			}
25795			b.resetWithControl(BlockARM64TBNZ, x)
25796			b.AuxInt = int64ToAuxInt(int64(ntz64(c)))
25797			return true
25798		}
25799		// match: (NE (TSTWconst [c] x) yes no)
25800		// cond: oneBit(int64(uint32(c)))
25801		// result: (TBNZ [int64(ntz64(int64(uint32(c))))] x yes no)
25802		for b.Controls[0].Op == OpARM64TSTWconst {
25803			v_0 := b.Controls[0]
25804			c := auxIntToInt32(v_0.AuxInt)
25805			x := v_0.Args[0]
25806			if !(oneBit(int64(uint32(c)))) {
25807				break
25808			}
25809			b.resetWithControl(BlockARM64TBNZ, x)
25810			b.AuxInt = int64ToAuxInt(int64(ntz64(int64(uint32(c)))))
25811			return true
25812		}
25813		// match: (NE (FlagConstant [fc]) yes no)
25814		// cond: fc.ne()
25815		// result: (First yes no)
25816		for b.Controls[0].Op == OpARM64FlagConstant {
25817			v_0 := b.Controls[0]
25818			fc := auxIntToFlagConstant(v_0.AuxInt)
25819			if !(fc.ne()) {
25820				break
25821			}
25822			b.Reset(BlockFirst)
25823			return true
25824		}
25825		// match: (NE (FlagConstant [fc]) yes no)
25826		// cond: !fc.ne()
25827		// result: (First no yes)
25828		for b.Controls[0].Op == OpARM64FlagConstant {
25829			v_0 := b.Controls[0]
25830			fc := auxIntToFlagConstant(v_0.AuxInt)
25831			if !(!fc.ne()) {
25832				break
25833			}
25834			b.Reset(BlockFirst)
25835			b.swapSuccessors()
25836			return true
25837		}
25838		// match: (NE (InvertFlags cmp) yes no)
25839		// result: (NE cmp yes no)
25840		for b.Controls[0].Op == OpARM64InvertFlags {
25841			v_0 := b.Controls[0]
25842			cmp := v_0.Args[0]
25843			b.resetWithControl(BlockARM64NE, cmp)
25844			return true
25845		}
25846	case BlockARM64NZ:
25847		// match: (NZ (Equal cc) yes no)
25848		// result: (EQ cc yes no)
25849		for b.Controls[0].Op == OpARM64Equal {
25850			v_0 := b.Controls[0]
25851			cc := v_0.Args[0]
25852			b.resetWithControl(BlockARM64EQ, cc)
25853			return true
25854		}
25855		// match: (NZ (NotEqual cc) yes no)
25856		// result: (NE cc yes no)
25857		for b.Controls[0].Op == OpARM64NotEqual {
25858			v_0 := b.Controls[0]
25859			cc := v_0.Args[0]
25860			b.resetWithControl(BlockARM64NE, cc)
25861			return true
25862		}
25863		// match: (NZ (LessThan cc) yes no)
25864		// result: (LT cc yes no)
25865		for b.Controls[0].Op == OpARM64LessThan {
25866			v_0 := b.Controls[0]
25867			cc := v_0.Args[0]
25868			b.resetWithControl(BlockARM64LT, cc)
25869			return true
25870		}
25871		// match: (NZ (LessThanU cc) yes no)
25872		// result: (ULT cc yes no)
25873		for b.Controls[0].Op == OpARM64LessThanU {
25874			v_0 := b.Controls[0]
25875			cc := v_0.Args[0]
25876			b.resetWithControl(BlockARM64ULT, cc)
25877			return true
25878		}
25879		// match: (NZ (LessEqual cc) yes no)
25880		// result: (LE cc yes no)
25881		for b.Controls[0].Op == OpARM64LessEqual {
25882			v_0 := b.Controls[0]
25883			cc := v_0.Args[0]
25884			b.resetWithControl(BlockARM64LE, cc)
25885			return true
25886		}
25887		// match: (NZ (LessEqualU cc) yes no)
25888		// result: (ULE cc yes no)
25889		for b.Controls[0].Op == OpARM64LessEqualU {
25890			v_0 := b.Controls[0]
25891			cc := v_0.Args[0]
25892			b.resetWithControl(BlockARM64ULE, cc)
25893			return true
25894		}
25895		// match: (NZ (GreaterThan cc) yes no)
25896		// result: (GT cc yes no)
25897		for b.Controls[0].Op == OpARM64GreaterThan {
25898			v_0 := b.Controls[0]
25899			cc := v_0.Args[0]
25900			b.resetWithControl(BlockARM64GT, cc)
25901			return true
25902		}
25903		// match: (NZ (GreaterThanU cc) yes no)
25904		// result: (UGT cc yes no)
25905		for b.Controls[0].Op == OpARM64GreaterThanU {
25906			v_0 := b.Controls[0]
25907			cc := v_0.Args[0]
25908			b.resetWithControl(BlockARM64UGT, cc)
25909			return true
25910		}
25911		// match: (NZ (GreaterEqual cc) yes no)
25912		// result: (GE cc yes no)
25913		for b.Controls[0].Op == OpARM64GreaterEqual {
25914			v_0 := b.Controls[0]
25915			cc := v_0.Args[0]
25916			b.resetWithControl(BlockARM64GE, cc)
25917			return true
25918		}
25919		// match: (NZ (GreaterEqualU cc) yes no)
25920		// result: (UGE cc yes no)
25921		for b.Controls[0].Op == OpARM64GreaterEqualU {
25922			v_0 := b.Controls[0]
25923			cc := v_0.Args[0]
25924			b.resetWithControl(BlockARM64UGE, cc)
25925			return true
25926		}
25927		// match: (NZ (LessThanF cc) yes no)
25928		// result: (FLT cc yes no)
25929		for b.Controls[0].Op == OpARM64LessThanF {
25930			v_0 := b.Controls[0]
25931			cc := v_0.Args[0]
25932			b.resetWithControl(BlockARM64FLT, cc)
25933			return true
25934		}
25935		// match: (NZ (LessEqualF cc) yes no)
25936		// result: (FLE cc yes no)
25937		for b.Controls[0].Op == OpARM64LessEqualF {
25938			v_0 := b.Controls[0]
25939			cc := v_0.Args[0]
25940			b.resetWithControl(BlockARM64FLE, cc)
25941			return true
25942		}
25943		// match: (NZ (GreaterThanF cc) yes no)
25944		// result: (FGT cc yes no)
25945		for b.Controls[0].Op == OpARM64GreaterThanF {
25946			v_0 := b.Controls[0]
25947			cc := v_0.Args[0]
25948			b.resetWithControl(BlockARM64FGT, cc)
25949			return true
25950		}
25951		// match: (NZ (GreaterEqualF cc) yes no)
25952		// result: (FGE cc yes no)
25953		for b.Controls[0].Op == OpARM64GreaterEqualF {
25954			v_0 := b.Controls[0]
25955			cc := v_0.Args[0]
25956			b.resetWithControl(BlockARM64FGE, cc)
25957			return true
25958		}
25959		// match: (NZ (ANDconst [c] x) yes no)
25960		// cond: oneBit(c)
25961		// result: (TBNZ [int64(ntz64(c))] x yes no)
25962		for b.Controls[0].Op == OpARM64ANDconst {
25963			v_0 := b.Controls[0]
25964			c := auxIntToInt64(v_0.AuxInt)
25965			x := v_0.Args[0]
25966			if !(oneBit(c)) {
25967				break
25968			}
25969			b.resetWithControl(BlockARM64TBNZ, x)
25970			b.AuxInt = int64ToAuxInt(int64(ntz64(c)))
25971			return true
25972		}
25973		// match: (NZ (MOVDconst [0]) yes no)
25974		// result: (First no yes)
25975		for b.Controls[0].Op == OpARM64MOVDconst {
25976			v_0 := b.Controls[0]
25977			if auxIntToInt64(v_0.AuxInt) != 0 {
25978				break
25979			}
25980			b.Reset(BlockFirst)
25981			b.swapSuccessors()
25982			return true
25983		}
25984		// match: (NZ (MOVDconst [c]) yes no)
25985		// cond: c != 0
25986		// result: (First yes no)
25987		for b.Controls[0].Op == OpARM64MOVDconst {
25988			v_0 := b.Controls[0]
25989			c := auxIntToInt64(v_0.AuxInt)
25990			if !(c != 0) {
25991				break
25992			}
25993			b.Reset(BlockFirst)
25994			return true
25995		}
25996	case BlockARM64NZW:
25997		// match: (NZW (ANDconst [c] x) yes no)
25998		// cond: oneBit(int64(uint32(c)))
25999		// result: (TBNZ [int64(ntz64(int64(uint32(c))))] x yes no)
26000		for b.Controls[0].Op == OpARM64ANDconst {
26001			v_0 := b.Controls[0]
26002			c := auxIntToInt64(v_0.AuxInt)
26003			x := v_0.Args[0]
26004			if !(oneBit(int64(uint32(c)))) {
26005				break
26006			}
26007			b.resetWithControl(BlockARM64TBNZ, x)
26008			b.AuxInt = int64ToAuxInt(int64(ntz64(int64(uint32(c)))))
26009			return true
26010		}
26011		// match: (NZW (MOVDconst [c]) yes no)
26012		// cond: int32(c) == 0
26013		// result: (First no yes)
26014		for b.Controls[0].Op == OpARM64MOVDconst {
26015			v_0 := b.Controls[0]
26016			c := auxIntToInt64(v_0.AuxInt)
26017			if !(int32(c) == 0) {
26018				break
26019			}
26020			b.Reset(BlockFirst)
26021			b.swapSuccessors()
26022			return true
26023		}
26024		// match: (NZW (MOVDconst [c]) yes no)
26025		// cond: int32(c) != 0
26026		// result: (First yes no)
26027		for b.Controls[0].Op == OpARM64MOVDconst {
26028			v_0 := b.Controls[0]
26029			c := auxIntToInt64(v_0.AuxInt)
26030			if !(int32(c) != 0) {
26031				break
26032			}
26033			b.Reset(BlockFirst)
26034			return true
26035		}
26036	case BlockARM64TBNZ:
26037		// match: (TBNZ [0] (Equal cc) yes no)
26038		// result: (EQ cc yes no)
26039		for b.Controls[0].Op == OpARM64Equal {
26040			v_0 := b.Controls[0]
26041			cc := v_0.Args[0]
26042			if auxIntToInt64(b.AuxInt) != 0 {
26043				break
26044			}
26045			b.resetWithControl(BlockARM64EQ, cc)
26046			return true
26047		}
26048		// match: (TBNZ [0] (NotEqual cc) yes no)
26049		// result: (NE cc yes no)
26050		for b.Controls[0].Op == OpARM64NotEqual {
26051			v_0 := b.Controls[0]
26052			cc := v_0.Args[0]
26053			if auxIntToInt64(b.AuxInt) != 0 {
26054				break
26055			}
26056			b.resetWithControl(BlockARM64NE, cc)
26057			return true
26058		}
26059		// match: (TBNZ [0] (LessThan cc) yes no)
26060		// result: (LT cc yes no)
26061		for b.Controls[0].Op == OpARM64LessThan {
26062			v_0 := b.Controls[0]
26063			cc := v_0.Args[0]
26064			if auxIntToInt64(b.AuxInt) != 0 {
26065				break
26066			}
26067			b.resetWithControl(BlockARM64LT, cc)
26068			return true
26069		}
26070		// match: (TBNZ [0] (LessThanU cc) yes no)
26071		// result: (ULT cc yes no)
26072		for b.Controls[0].Op == OpARM64LessThanU {
26073			v_0 := b.Controls[0]
26074			cc := v_0.Args[0]
26075			if auxIntToInt64(b.AuxInt) != 0 {
26076				break
26077			}
26078			b.resetWithControl(BlockARM64ULT, cc)
26079			return true
26080		}
26081		// match: (TBNZ [0] (LessEqual cc) yes no)
26082		// result: (LE cc yes no)
26083		for b.Controls[0].Op == OpARM64LessEqual {
26084			v_0 := b.Controls[0]
26085			cc := v_0.Args[0]
26086			if auxIntToInt64(b.AuxInt) != 0 {
26087				break
26088			}
26089			b.resetWithControl(BlockARM64LE, cc)
26090			return true
26091		}
26092		// match: (TBNZ [0] (LessEqualU cc) yes no)
26093		// result: (ULE cc yes no)
26094		for b.Controls[0].Op == OpARM64LessEqualU {
26095			v_0 := b.Controls[0]
26096			cc := v_0.Args[0]
26097			if auxIntToInt64(b.AuxInt) != 0 {
26098				break
26099			}
26100			b.resetWithControl(BlockARM64ULE, cc)
26101			return true
26102		}
26103		// match: (TBNZ [0] (GreaterThan cc) yes no)
26104		// result: (GT cc yes no)
26105		for b.Controls[0].Op == OpARM64GreaterThan {
26106			v_0 := b.Controls[0]
26107			cc := v_0.Args[0]
26108			if auxIntToInt64(b.AuxInt) != 0 {
26109				break
26110			}
26111			b.resetWithControl(BlockARM64GT, cc)
26112			return true
26113		}
26114		// match: (TBNZ [0] (GreaterThanU cc) yes no)
26115		// result: (UGT cc yes no)
26116		for b.Controls[0].Op == OpARM64GreaterThanU {
26117			v_0 := b.Controls[0]
26118			cc := v_0.Args[0]
26119			if auxIntToInt64(b.AuxInt) != 0 {
26120				break
26121			}
26122			b.resetWithControl(BlockARM64UGT, cc)
26123			return true
26124		}
26125		// match: (TBNZ [0] (GreaterEqual cc) yes no)
26126		// result: (GE cc yes no)
26127		for b.Controls[0].Op == OpARM64GreaterEqual {
26128			v_0 := b.Controls[0]
26129			cc := v_0.Args[0]
26130			if auxIntToInt64(b.AuxInt) != 0 {
26131				break
26132			}
26133			b.resetWithControl(BlockARM64GE, cc)
26134			return true
26135		}
26136		// match: (TBNZ [0] (GreaterEqualU cc) yes no)
26137		// result: (UGE cc yes no)
26138		for b.Controls[0].Op == OpARM64GreaterEqualU {
26139			v_0 := b.Controls[0]
26140			cc := v_0.Args[0]
26141			if auxIntToInt64(b.AuxInt) != 0 {
26142				break
26143			}
26144			b.resetWithControl(BlockARM64UGE, cc)
26145			return true
26146		}
26147		// match: (TBNZ [0] (LessThanF cc) yes no)
26148		// result: (FLT cc yes no)
26149		for b.Controls[0].Op == OpARM64LessThanF {
26150			v_0 := b.Controls[0]
26151			cc := v_0.Args[0]
26152			if auxIntToInt64(b.AuxInt) != 0 {
26153				break
26154			}
26155			b.resetWithControl(BlockARM64FLT, cc)
26156			return true
26157		}
26158		// match: (TBNZ [0] (LessEqualF cc) yes no)
26159		// result: (FLE cc yes no)
26160		for b.Controls[0].Op == OpARM64LessEqualF {
26161			v_0 := b.Controls[0]
26162			cc := v_0.Args[0]
26163			if auxIntToInt64(b.AuxInt) != 0 {
26164				break
26165			}
26166			b.resetWithControl(BlockARM64FLE, cc)
26167			return true
26168		}
26169		// match: (TBNZ [0] (GreaterThanF cc) yes no)
26170		// result: (FGT cc yes no)
26171		for b.Controls[0].Op == OpARM64GreaterThanF {
26172			v_0 := b.Controls[0]
26173			cc := v_0.Args[0]
26174			if auxIntToInt64(b.AuxInt) != 0 {
26175				break
26176			}
26177			b.resetWithControl(BlockARM64FGT, cc)
26178			return true
26179		}
26180		// match: (TBNZ [0] (GreaterEqualF cc) yes no)
26181		// result: (FGE cc yes no)
26182		for b.Controls[0].Op == OpARM64GreaterEqualF {
26183			v_0 := b.Controls[0]
26184			cc := v_0.Args[0]
26185			if auxIntToInt64(b.AuxInt) != 0 {
26186				break
26187			}
26188			b.resetWithControl(BlockARM64FGE, cc)
26189			return true
26190		}
26191	case BlockARM64UGE:
26192		// match: (UGE (FlagConstant [fc]) yes no)
26193		// cond: fc.uge()
26194		// result: (First yes no)
26195		for b.Controls[0].Op == OpARM64FlagConstant {
26196			v_0 := b.Controls[0]
26197			fc := auxIntToFlagConstant(v_0.AuxInt)
26198			if !(fc.uge()) {
26199				break
26200			}
26201			b.Reset(BlockFirst)
26202			return true
26203		}
26204		// match: (UGE (FlagConstant [fc]) yes no)
26205		// cond: !fc.uge()
26206		// result: (First no yes)
26207		for b.Controls[0].Op == OpARM64FlagConstant {
26208			v_0 := b.Controls[0]
26209			fc := auxIntToFlagConstant(v_0.AuxInt)
26210			if !(!fc.uge()) {
26211				break
26212			}
26213			b.Reset(BlockFirst)
26214			b.swapSuccessors()
26215			return true
26216		}
26217		// match: (UGE (InvertFlags cmp) yes no)
26218		// result: (ULE cmp yes no)
26219		for b.Controls[0].Op == OpARM64InvertFlags {
26220			v_0 := b.Controls[0]
26221			cmp := v_0.Args[0]
26222			b.resetWithControl(BlockARM64ULE, cmp)
26223			return true
26224		}
26225	case BlockARM64UGT:
26226		// match: (UGT (FlagConstant [fc]) yes no)
26227		// cond: fc.ugt()
26228		// result: (First yes no)
26229		for b.Controls[0].Op == OpARM64FlagConstant {
26230			v_0 := b.Controls[0]
26231			fc := auxIntToFlagConstant(v_0.AuxInt)
26232			if !(fc.ugt()) {
26233				break
26234			}
26235			b.Reset(BlockFirst)
26236			return true
26237		}
26238		// match: (UGT (FlagConstant [fc]) yes no)
26239		// cond: !fc.ugt()
26240		// result: (First no yes)
26241		for b.Controls[0].Op == OpARM64FlagConstant {
26242			v_0 := b.Controls[0]
26243			fc := auxIntToFlagConstant(v_0.AuxInt)
26244			if !(!fc.ugt()) {
26245				break
26246			}
26247			b.Reset(BlockFirst)
26248			b.swapSuccessors()
26249			return true
26250		}
26251		// match: (UGT (InvertFlags cmp) yes no)
26252		// result: (ULT cmp yes no)
26253		for b.Controls[0].Op == OpARM64InvertFlags {
26254			v_0 := b.Controls[0]
26255			cmp := v_0.Args[0]
26256			b.resetWithControl(BlockARM64ULT, cmp)
26257			return true
26258		}
26259	case BlockARM64ULE:
26260		// match: (ULE (FlagConstant [fc]) yes no)
26261		// cond: fc.ule()
26262		// result: (First yes no)
26263		for b.Controls[0].Op == OpARM64FlagConstant {
26264			v_0 := b.Controls[0]
26265			fc := auxIntToFlagConstant(v_0.AuxInt)
26266			if !(fc.ule()) {
26267				break
26268			}
26269			b.Reset(BlockFirst)
26270			return true
26271		}
26272		// match: (ULE (FlagConstant [fc]) yes no)
26273		// cond: !fc.ule()
26274		// result: (First no yes)
26275		for b.Controls[0].Op == OpARM64FlagConstant {
26276			v_0 := b.Controls[0]
26277			fc := auxIntToFlagConstant(v_0.AuxInt)
26278			if !(!fc.ule()) {
26279				break
26280			}
26281			b.Reset(BlockFirst)
26282			b.swapSuccessors()
26283			return true
26284		}
26285		// match: (ULE (InvertFlags cmp) yes no)
26286		// result: (UGE cmp yes no)
26287		for b.Controls[0].Op == OpARM64InvertFlags {
26288			v_0 := b.Controls[0]
26289			cmp := v_0.Args[0]
26290			b.resetWithControl(BlockARM64UGE, cmp)
26291			return true
26292		}
26293	case BlockARM64ULT:
26294		// match: (ULT (FlagConstant [fc]) yes no)
26295		// cond: fc.ult()
26296		// result: (First yes no)
26297		for b.Controls[0].Op == OpARM64FlagConstant {
26298			v_0 := b.Controls[0]
26299			fc := auxIntToFlagConstant(v_0.AuxInt)
26300			if !(fc.ult()) {
26301				break
26302			}
26303			b.Reset(BlockFirst)
26304			return true
26305		}
26306		// match: (ULT (FlagConstant [fc]) yes no)
26307		// cond: !fc.ult()
26308		// result: (First no yes)
26309		for b.Controls[0].Op == OpARM64FlagConstant {
26310			v_0 := b.Controls[0]
26311			fc := auxIntToFlagConstant(v_0.AuxInt)
26312			if !(!fc.ult()) {
26313				break
26314			}
26315			b.Reset(BlockFirst)
26316			b.swapSuccessors()
26317			return true
26318		}
26319		// match: (ULT (InvertFlags cmp) yes no)
26320		// result: (UGT cmp yes no)
26321		for b.Controls[0].Op == OpARM64InvertFlags {
26322			v_0 := b.Controls[0]
26323			cmp := v_0.Args[0]
26324			b.resetWithControl(BlockARM64UGT, cmp)
26325			return true
26326		}
26327	case BlockARM64Z:
26328		// match: (Z (ANDconst [c] x) yes no)
26329		// cond: oneBit(c)
26330		// result: (TBZ [int64(ntz64(c))] x yes no)
26331		for b.Controls[0].Op == OpARM64ANDconst {
26332			v_0 := b.Controls[0]
26333			c := auxIntToInt64(v_0.AuxInt)
26334			x := v_0.Args[0]
26335			if !(oneBit(c)) {
26336				break
26337			}
26338			b.resetWithControl(BlockARM64TBZ, x)
26339			b.AuxInt = int64ToAuxInt(int64(ntz64(c)))
26340			return true
26341		}
26342		// match: (Z (MOVDconst [0]) yes no)
26343		// result: (First yes no)
26344		for b.Controls[0].Op == OpARM64MOVDconst {
26345			v_0 := b.Controls[0]
26346			if auxIntToInt64(v_0.AuxInt) != 0 {
26347				break
26348			}
26349			b.Reset(BlockFirst)
26350			return true
26351		}
26352		// match: (Z (MOVDconst [c]) yes no)
26353		// cond: c != 0
26354		// result: (First no yes)
26355		for b.Controls[0].Op == OpARM64MOVDconst {
26356			v_0 := b.Controls[0]
26357			c := auxIntToInt64(v_0.AuxInt)
26358			if !(c != 0) {
26359				break
26360			}
26361			b.Reset(BlockFirst)
26362			b.swapSuccessors()
26363			return true
26364		}
26365	case BlockARM64ZW:
26366		// match: (ZW (ANDconst [c] x) yes no)
26367		// cond: oneBit(int64(uint32(c)))
26368		// result: (TBZ [int64(ntz64(int64(uint32(c))))] x yes no)
26369		for b.Controls[0].Op == OpARM64ANDconst {
26370			v_0 := b.Controls[0]
26371			c := auxIntToInt64(v_0.AuxInt)
26372			x := v_0.Args[0]
26373			if !(oneBit(int64(uint32(c)))) {
26374				break
26375			}
26376			b.resetWithControl(BlockARM64TBZ, x)
26377			b.AuxInt = int64ToAuxInt(int64(ntz64(int64(uint32(c)))))
26378			return true
26379		}
26380		// match: (ZW (MOVDconst [c]) yes no)
26381		// cond: int32(c) == 0
26382		// result: (First yes no)
26383		for b.Controls[0].Op == OpARM64MOVDconst {
26384			v_0 := b.Controls[0]
26385			c := auxIntToInt64(v_0.AuxInt)
26386			if !(int32(c) == 0) {
26387				break
26388			}
26389			b.Reset(BlockFirst)
26390			return true
26391		}
26392		// match: (ZW (MOVDconst [c]) yes no)
26393		// cond: int32(c) != 0
26394		// result: (First no yes)
26395		for b.Controls[0].Op == OpARM64MOVDconst {
26396			v_0 := b.Controls[0]
26397			c := auxIntToInt64(v_0.AuxInt)
26398			if !(int32(c) != 0) {
26399				break
26400			}
26401			b.Reset(BlockFirst)
26402			b.swapSuccessors()
26403			return true
26404		}
26405	}
26406	return false
26407}
26408