// Code generated from _gen/ARM64latelower.rules using 'go generate'; DO NOT EDIT.

package ssa

func rewriteValueARM64latelower(v *Value) bool {
	switch v.Op {
	case OpARM64ADDSconstflags:
		return rewriteValueARM64latelower_OpARM64ADDSconstflags(v)
	case OpARM64ADDconst:
		return rewriteValueARM64latelower_OpARM64ADDconst(v)
	case OpARM64ANDconst:
		return rewriteValueARM64latelower_OpARM64ANDconst(v)
	case OpARM64CMNWconst:
		return rewriteValueARM64latelower_OpARM64CMNWconst(v)
	case OpARM64CMNconst:
		return rewriteValueARM64latelower_OpARM64CMNconst(v)
	case OpARM64CMPWconst:
		return rewriteValueARM64latelower_OpARM64CMPWconst(v)
	case OpARM64CMPconst:
		return rewriteValueARM64latelower_OpARM64CMPconst(v)
	case OpARM64MOVBUreg:
		return rewriteValueARM64latelower_OpARM64MOVBUreg(v)
	case OpARM64MOVBreg:
		return rewriteValueARM64latelower_OpARM64MOVBreg(v)
	case OpARM64MOVHUreg:
		return rewriteValueARM64latelower_OpARM64MOVHUreg(v)
	case OpARM64MOVHreg:
		return rewriteValueARM64latelower_OpARM64MOVHreg(v)
	case OpARM64MOVWUreg:
		return rewriteValueARM64latelower_OpARM64MOVWUreg(v)
	case OpARM64MOVWreg:
		return rewriteValueARM64latelower_OpARM64MOVWreg(v)
	case OpARM64ORconst:
		return rewriteValueARM64latelower_OpARM64ORconst(v)
	case OpARM64SUBconst:
		return rewriteValueARM64latelower_OpARM64SUBconst(v)
	case OpARM64TSTWconst:
		return rewriteValueARM64latelower_OpARM64TSTWconst(v)
	case OpARM64TSTconst:
		return rewriteValueARM64latelower_OpARM64TSTconst(v)
	case OpARM64XORconst:
		return rewriteValueARM64latelower_OpARM64XORconst(v)
	}
	return false
}
func rewriteValueARM64latelower_OpARM64ADDSconstflags(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ADDSconstflags [c] x)
	// cond: !isARM64addcon(c)
	// result: (ADDSflags x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64ADDSflags)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64ADDconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ADDconst [c] x)
	// cond: !isARM64addcon(c)
	// result: (ADD x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64ANDconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ANDconst [c] x)
	// cond: !isARM64bitcon(uint64(c))
	// result: (AND x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c))) {
			break
		}
		v.reset(OpARM64AND)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64CMNWconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMNWconst [c] x)
	// cond: !isARM64addcon(int64(c))
	// result: (CMNW x (MOVDconst [int64(c)]))
	for {
		c := auxIntToInt32(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(int64(c))) {
			break
		}
		v.reset(OpARM64CMNW)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(c))
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64CMNconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMNconst [c] x)
	// cond: !isARM64addcon(c)
	// result: (CMN x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64CMN)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64CMPWconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPWconst [c] x)
	// cond: !isARM64addcon(int64(c))
	// result: (CMPW x (MOVDconst [int64(c)]))
	for {
		c := auxIntToInt32(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(int64(c))) {
			break
		}
		v.reset(OpARM64CMPW)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(c))
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64CMPconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPconst [c] x)
	// cond: !isARM64addcon(c)
	// result: (CMP x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64CMP)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBUreg x:(Equal _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64Equal {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(NotEqual _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64NotEqual {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessThan _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessThan {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessThanU _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessThanU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessThanF _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessThanF {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessEqual _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessEqual {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessEqualU _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessEqualU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessEqualF _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessEqualF {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterThan _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterThan {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterThanU _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterThanU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterThanF _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterThanF {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterEqual _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterEqual {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterEqualU _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterEqualU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterEqualF _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterEqualF {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWUreg x)
	// cond: zeroUpper32Bits(x, 3)
	// result: x
	for {
		x := v_0
		if !(zeroUpper32Bits(x, 3)) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUloadidx4 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUloadidx4 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWloadidx4 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWloadidx4 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64ORconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ORconst [c] x)
	// cond: !isARM64bitcon(uint64(c))
	// result: (OR x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c))) {
			break
		}
		v.reset(OpARM64OR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64SUBconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SUBconst [c] x)
	// cond: !isARM64addcon(c)
	// result: (SUB x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64TSTWconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (TSTWconst [c] x)
	// cond: !isARM64bitcon(uint64(c)|uint64(c)<<32)
	// result: (TSTW x (MOVDconst [int64(c)]))
	for {
		c := auxIntToInt32(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c) | uint64(c)<<32)) {
			break
		}
		v.reset(OpARM64TSTW)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(c))
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64TSTconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (TSTconst [c] x)
	// cond: !isARM64bitcon(uint64(c))
	// result: (TST x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c))) {
			break
		}
		v.reset(OpARM64TST)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64XORconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (XORconst [c] x)
	// cond: !isARM64bitcon(uint64(c))
	// result: (XOR x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c))) {
			break
		}
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteBlockARM64latelower(b *Block) bool {
	return false
}