1// Code generated from _gen/AMD64splitload.rules using 'go generate'; DO NOT EDIT.
2
3package ssa
4
// rewriteValueAMD64splitload dispatches v to the per-opcode rewrite helper
// for the splitload pass, which splits compare-with-memory ops into an
// explicit load followed by a register compare. It reports whether a
// rewrite fired; opcodes with no splitload rule return false unchanged.
func rewriteValueAMD64splitload(v *Value) bool {
	switch v.Op {
	case OpAMD64CMPBconstload:
		return rewriteValueAMD64splitload_OpAMD64CMPBconstload(v)
	case OpAMD64CMPBconstloadidx1:
		return rewriteValueAMD64splitload_OpAMD64CMPBconstloadidx1(v)
	case OpAMD64CMPBload:
		return rewriteValueAMD64splitload_OpAMD64CMPBload(v)
	case OpAMD64CMPBloadidx1:
		return rewriteValueAMD64splitload_OpAMD64CMPBloadidx1(v)
	case OpAMD64CMPLconstload:
		return rewriteValueAMD64splitload_OpAMD64CMPLconstload(v)
	case OpAMD64CMPLconstloadidx1:
		return rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx1(v)
	case OpAMD64CMPLconstloadidx4:
		return rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx4(v)
	case OpAMD64CMPLload:
		return rewriteValueAMD64splitload_OpAMD64CMPLload(v)
	case OpAMD64CMPLloadidx1:
		return rewriteValueAMD64splitload_OpAMD64CMPLloadidx1(v)
	case OpAMD64CMPLloadidx4:
		return rewriteValueAMD64splitload_OpAMD64CMPLloadidx4(v)
	case OpAMD64CMPQconstload:
		return rewriteValueAMD64splitload_OpAMD64CMPQconstload(v)
	case OpAMD64CMPQconstloadidx1:
		return rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx1(v)
	case OpAMD64CMPQconstloadidx8:
		return rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx8(v)
	case OpAMD64CMPQload:
		return rewriteValueAMD64splitload_OpAMD64CMPQload(v)
	case OpAMD64CMPQloadidx1:
		return rewriteValueAMD64splitload_OpAMD64CMPQloadidx1(v)
	case OpAMD64CMPQloadidx8:
		return rewriteValueAMD64splitload_OpAMD64CMPQloadidx8(v)
	case OpAMD64CMPWconstload:
		return rewriteValueAMD64splitload_OpAMD64CMPWconstload(v)
	case OpAMD64CMPWconstloadidx1:
		return rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx1(v)
	case OpAMD64CMPWconstloadidx2:
		return rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx2(v)
	case OpAMD64CMPWload:
		return rewriteValueAMD64splitload_OpAMD64CMPWload(v)
	case OpAMD64CMPWloadidx1:
		return rewriteValueAMD64splitload_OpAMD64CMPWloadidx1(v)
	case OpAMD64CMPWloadidx2:
		return rewriteValueAMD64splitload_OpAMD64CMPWloadidx2(v)
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPBconstload splits CMPBconstload into
// a MOVBload plus a register compare: TESTB x x when the compared constant
// is zero, CMPBconst otherwise.
func rewriteValueAMD64splitload_OpAMD64CMPBconstload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPBconstload {sym} [vo] ptr mem)
	// cond: vo.Val() == 0
	// result: (TESTB x:(MOVBload {sym} [vo.Off()] ptr mem) x)
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		mem := v_1
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTB)
		x := b.NewValue0(v.Pos, OpAMD64MOVBload, typ.UInt8)
		x.AuxInt = int32ToAuxInt(vo.Off())
		x.Aux = symToAux(sym)
		x.AddArg2(ptr, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPBconstload {sym} [vo] ptr mem)
	// cond: vo.Val() != 0
	// result: (CMPBconst (MOVBload {sym} [vo.Off()] ptr mem) [vo.Val8()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		mem := v_1
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPBconst)
		v.AuxInt = int8ToAuxInt(vo.Val8())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVBload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(vo.Off())
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPBconstloadidx1 splits
// CMPBconstloadidx1 into a MOVBloadidx1 plus a register compare:
// TESTB x x when the compared constant is zero, CMPBconst otherwise.
func rewriteValueAMD64splitload_OpAMD64CMPBconstloadidx1(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPBconstloadidx1 {sym} [vo] ptr idx mem)
	// cond: vo.Val() == 0
	// result: (TESTB x:(MOVBloadidx1 {sym} [vo.Off()] ptr idx mem) x)
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTB)
		x := b.NewValue0(v.Pos, OpAMD64MOVBloadidx1, typ.UInt8)
		x.AuxInt = int32ToAuxInt(vo.Off())
		x.Aux = symToAux(sym)
		x.AddArg3(ptr, idx, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPBconstloadidx1 {sym} [vo] ptr idx mem)
	// cond: vo.Val() != 0
	// result: (CMPBconst (MOVBloadidx1 {sym} [vo.Off()] ptr idx mem) [vo.Val8()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPBconst)
		v.AuxInt = int8ToAuxInt(vo.Val8())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVBloadidx1, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(vo.Off())
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPBload splits CMPBload into a
// MOVBload followed by a register-register CMPB. Always rewrites.
func rewriteValueAMD64splitload_OpAMD64CMPBload(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPBload {sym} [off] ptr x mem)
	// result: (CMPB (MOVBload {sym} [off] ptr mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		x := v_1
		mem := v_2
		v.reset(OpAMD64CMPB)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVBload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPBloadidx1 splits CMPBloadidx1 into a
// MOVBloadidx1 followed by a register-register CMPB. Always rewrites.
func rewriteValueAMD64splitload_OpAMD64CMPBloadidx1(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPBloadidx1 {sym} [off] ptr idx x mem)
	// result: (CMPB (MOVBloadidx1 {sym} [off] ptr idx mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		x := v_2
		mem := v_3
		v.reset(OpAMD64CMPB)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVBloadidx1, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPLconstload splits CMPLconstload into
// a MOVLload plus a register compare: TESTL x x when the compared constant
// is zero, CMPLconst otherwise.
func rewriteValueAMD64splitload_OpAMD64CMPLconstload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPLconstload {sym} [vo] ptr mem)
	// cond: vo.Val() == 0
	// result: (TESTL x:(MOVLload {sym} [vo.Off()] ptr mem) x)
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		mem := v_1
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTL)
		x := b.NewValue0(v.Pos, OpAMD64MOVLload, typ.UInt32)
		x.AuxInt = int32ToAuxInt(vo.Off())
		x.Aux = symToAux(sym)
		x.AddArg2(ptr, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPLconstload {sym} [vo] ptr mem)
	// cond: vo.Val() != 0
	// result: (CMPLconst (MOVLload {sym} [vo.Off()] ptr mem) [vo.Val()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		mem := v_1
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPLconst)
		v.AuxInt = int32ToAuxInt(vo.Val())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVLload, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(vo.Off())
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx1 splits
// CMPLconstloadidx1 into a MOVLloadidx1 plus a register compare:
// TESTL x x when the compared constant is zero, CMPLconst otherwise.
func rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx1(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPLconstloadidx1 {sym} [vo] ptr idx mem)
	// cond: vo.Val() == 0
	// result: (TESTL x:(MOVLloadidx1 {sym} [vo.Off()] ptr idx mem) x)
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTL)
		x := b.NewValue0(v.Pos, OpAMD64MOVLloadidx1, typ.UInt32)
		x.AuxInt = int32ToAuxInt(vo.Off())
		x.Aux = symToAux(sym)
		x.AddArg3(ptr, idx, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPLconstloadidx1 {sym} [vo] ptr idx mem)
	// cond: vo.Val() != 0
	// result: (CMPLconst (MOVLloadidx1 {sym} [vo.Off()] ptr idx mem) [vo.Val()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPLconst)
		v.AuxInt = int32ToAuxInt(vo.Val())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVLloadidx1, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(vo.Off())
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx4 splits
// CMPLconstloadidx4 into a MOVLloadidx4 plus a register compare:
// TESTL x x when the compared constant is zero, CMPLconst otherwise.
func rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx4(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPLconstloadidx4 {sym} [vo] ptr idx mem)
	// cond: vo.Val() == 0
	// result: (TESTL x:(MOVLloadidx4 {sym} [vo.Off()] ptr idx mem) x)
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTL)
		x := b.NewValue0(v.Pos, OpAMD64MOVLloadidx4, typ.UInt32)
		x.AuxInt = int32ToAuxInt(vo.Off())
		x.Aux = symToAux(sym)
		x.AddArg3(ptr, idx, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPLconstloadidx4 {sym} [vo] ptr idx mem)
	// cond: vo.Val() != 0
	// result: (CMPLconst (MOVLloadidx4 {sym} [vo.Off()] ptr idx mem) [vo.Val()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPLconst)
		v.AuxInt = int32ToAuxInt(vo.Val())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVLloadidx4, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(vo.Off())
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPLload splits CMPLload into a
// MOVLload followed by a register-register CMPL. Always rewrites.
func rewriteValueAMD64splitload_OpAMD64CMPLload(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPLload {sym} [off] ptr x mem)
	// result: (CMPL (MOVLload {sym} [off] ptr mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		x := v_1
		mem := v_2
		v.reset(OpAMD64CMPL)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVLload, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPLloadidx1 splits CMPLloadidx1 into a
// MOVLloadidx1 followed by a register-register CMPL. Always rewrites.
func rewriteValueAMD64splitload_OpAMD64CMPLloadidx1(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPLloadidx1 {sym} [off] ptr idx x mem)
	// result: (CMPL (MOVLloadidx1 {sym} [off] ptr idx mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		x := v_2
		mem := v_3
		v.reset(OpAMD64CMPL)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVLloadidx1, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPLloadidx4 splits CMPLloadidx4 into a
// MOVLloadidx4 followed by a register-register CMPL. Always rewrites.
func rewriteValueAMD64splitload_OpAMD64CMPLloadidx4(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPLloadidx4 {sym} [off] ptr idx x mem)
	// result: (CMPL (MOVLloadidx4 {sym} [off] ptr idx mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		x := v_2
		mem := v_3
		v.reset(OpAMD64CMPL)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVLloadidx4, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPQconstload splits CMPQconstload into
// a MOVQload plus a register compare: TESTQ x x when the compared constant
// is zero, CMPQconst otherwise.
func rewriteValueAMD64splitload_OpAMD64CMPQconstload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPQconstload {sym} [vo] ptr mem)
	// cond: vo.Val() == 0
	// result: (TESTQ x:(MOVQload {sym} [vo.Off()] ptr mem) x)
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		mem := v_1
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTQ)
		x := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)
		x.AuxInt = int32ToAuxInt(vo.Off())
		x.Aux = symToAux(sym)
		x.AddArg2(ptr, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPQconstload {sym} [vo] ptr mem)
	// cond: vo.Val() != 0
	// result: (CMPQconst (MOVQload {sym} [vo.Off()] ptr mem) [vo.Val()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		mem := v_1
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPQconst)
		v.AuxInt = int32ToAuxInt(vo.Val())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(vo.Off())
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx1 splits
// CMPQconstloadidx1 into a MOVQloadidx1 plus a register compare:
// TESTQ x x when the compared constant is zero, CMPQconst otherwise.
func rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx1(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPQconstloadidx1 {sym} [vo] ptr idx mem)
	// cond: vo.Val() == 0
	// result: (TESTQ x:(MOVQloadidx1 {sym} [vo.Off()] ptr idx mem) x)
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTQ)
		x := b.NewValue0(v.Pos, OpAMD64MOVQloadidx1, typ.UInt64)
		x.AuxInt = int32ToAuxInt(vo.Off())
		x.Aux = symToAux(sym)
		x.AddArg3(ptr, idx, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPQconstloadidx1 {sym} [vo] ptr idx mem)
	// cond: vo.Val() != 0
	// result: (CMPQconst (MOVQloadidx1 {sym} [vo.Off()] ptr idx mem) [vo.Val()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPQconst)
		v.AuxInt = int32ToAuxInt(vo.Val())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVQloadidx1, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(vo.Off())
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx8 splits
// CMPQconstloadidx8 into a MOVQloadidx8 plus a register compare:
// TESTQ x x when the compared constant is zero, CMPQconst otherwise.
func rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPQconstloadidx8 {sym} [vo] ptr idx mem)
	// cond: vo.Val() == 0
	// result: (TESTQ x:(MOVQloadidx8 {sym} [vo.Off()] ptr idx mem) x)
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTQ)
		x := b.NewValue0(v.Pos, OpAMD64MOVQloadidx8, typ.UInt64)
		x.AuxInt = int32ToAuxInt(vo.Off())
		x.Aux = symToAux(sym)
		x.AddArg3(ptr, idx, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPQconstloadidx8 {sym} [vo] ptr idx mem)
	// cond: vo.Val() != 0
	// result: (CMPQconst (MOVQloadidx8 {sym} [vo.Off()] ptr idx mem) [vo.Val()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPQconst)
		v.AuxInt = int32ToAuxInt(vo.Val())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVQloadidx8, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(vo.Off())
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPQload splits CMPQload into a
// MOVQload followed by a register-register CMPQ. Always rewrites.
func rewriteValueAMD64splitload_OpAMD64CMPQload(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPQload {sym} [off] ptr x mem)
	// result: (CMPQ (MOVQload {sym} [off] ptr mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		x := v_1
		mem := v_2
		v.reset(OpAMD64CMPQ)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPQloadidx1 splits CMPQloadidx1 into a
// MOVQloadidx1 followed by a register-register CMPQ. Always rewrites.
func rewriteValueAMD64splitload_OpAMD64CMPQloadidx1(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPQloadidx1 {sym} [off] ptr idx x mem)
	// result: (CMPQ (MOVQloadidx1 {sym} [off] ptr idx mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		x := v_2
		mem := v_3
		v.reset(OpAMD64CMPQ)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVQloadidx1, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPQloadidx8 splits CMPQloadidx8 into a
// MOVQloadidx8 followed by a register-register CMPQ. Always rewrites.
func rewriteValueAMD64splitload_OpAMD64CMPQloadidx8(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPQloadidx8 {sym} [off] ptr idx x mem)
	// result: (CMPQ (MOVQloadidx8 {sym} [off] ptr idx mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		x := v_2
		mem := v_3
		v.reset(OpAMD64CMPQ)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVQloadidx8, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPWconstload splits CMPWconstload into
// a MOVWload plus a register compare: TESTW x x when the compared constant
// is zero, CMPWconst otherwise.
func rewriteValueAMD64splitload_OpAMD64CMPWconstload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPWconstload {sym} [vo] ptr mem)
	// cond: vo.Val() == 0
	// result: (TESTW x:(MOVWload {sym} [vo.Off()] ptr mem) x)
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		mem := v_1
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTW)
		x := b.NewValue0(v.Pos, OpAMD64MOVWload, typ.UInt16)
		x.AuxInt = int32ToAuxInt(vo.Off())
		x.Aux = symToAux(sym)
		x.AddArg2(ptr, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPWconstload {sym} [vo] ptr mem)
	// cond: vo.Val() != 0
	// result: (CMPWconst (MOVWload {sym} [vo.Off()] ptr mem) [vo.Val16()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		mem := v_1
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPWconst)
		v.AuxInt = int16ToAuxInt(vo.Val16())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVWload, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(vo.Off())
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx1 splits
// CMPWconstloadidx1 into a MOVWloadidx1 plus a register compare:
// TESTW x x when the compared constant is zero, CMPWconst otherwise.
func rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx1(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPWconstloadidx1 {sym} [vo] ptr idx mem)
	// cond: vo.Val() == 0
	// result: (TESTW x:(MOVWloadidx1 {sym} [vo.Off()] ptr idx mem) x)
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTW)
		x := b.NewValue0(v.Pos, OpAMD64MOVWloadidx1, typ.UInt16)
		x.AuxInt = int32ToAuxInt(vo.Off())
		x.Aux = symToAux(sym)
		x.AddArg3(ptr, idx, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPWconstloadidx1 {sym} [vo] ptr idx mem)
	// cond: vo.Val() != 0
	// result: (CMPWconst (MOVWloadidx1 {sym} [vo.Off()] ptr idx mem) [vo.Val16()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPWconst)
		v.AuxInt = int16ToAuxInt(vo.Val16())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVWloadidx1, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(vo.Off())
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx2 splits
// CMPWconstloadidx2 into a MOVWloadidx2 plus a register compare:
// TESTW x x when the compared constant is zero, CMPWconst otherwise.
func rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx2(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPWconstloadidx2 {sym} [vo] ptr idx mem)
	// cond: vo.Val() == 0
	// result: (TESTW x:(MOVWloadidx2 {sym} [vo.Off()] ptr idx mem) x)
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTW)
		x := b.NewValue0(v.Pos, OpAMD64MOVWloadidx2, typ.UInt16)
		x.AuxInt = int32ToAuxInt(vo.Off())
		x.Aux = symToAux(sym)
		x.AddArg3(ptr, idx, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPWconstloadidx2 {sym} [vo] ptr idx mem)
	// cond: vo.Val() != 0
	// result: (CMPWconst (MOVWloadidx2 {sym} [vo.Off()] ptr idx mem) [vo.Val16()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPWconst)
		v.AuxInt = int16ToAuxInt(vo.Val16())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVWloadidx2, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(vo.Off())
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPWload splits CMPWload into a
// MOVWload followed by a register-register CMPW. Always rewrites.
func rewriteValueAMD64splitload_OpAMD64CMPWload(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPWload {sym} [off] ptr x mem)
	// result: (CMPW (MOVWload {sym} [off] ptr mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		x := v_1
		mem := v_2
		v.reset(OpAMD64CMPW)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVWload, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPWloadidx1 splits CMPWloadidx1 into a
// MOVWloadidx1 followed by a register-register CMPW. Always rewrites.
func rewriteValueAMD64splitload_OpAMD64CMPWloadidx1(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPWloadidx1 {sym} [off] ptr idx x mem)
	// result: (CMPW (MOVWloadidx1 {sym} [off] ptr idx mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		x := v_2
		mem := v_3
		v.reset(OpAMD64CMPW)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVWloadidx1, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPWloadidx2 splits CMPWloadidx2 into a
// MOVWloadidx2 followed by a register-register CMPW. Always rewrites.
func rewriteValueAMD64splitload_OpAMD64CMPWloadidx2(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPWloadidx2 {sym} [off] ptr idx x mem)
	// result: (CMPW (MOVWloadidx2 {sym} [off] ptr idx mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		x := v_2
		mem := v_3
		v.reset(OpAMD64CMPW)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVWloadidx2, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteBlockAMD64splitload has no block rewrite rules in the splitload
// pass; it always reports false (no change).
func rewriteBlockAMD64splitload(b *Block) bool {
	return false
}
851