1 /**************************************************************************
2 *
3 * Copyright (C) 1999-2005 Brian Paul All Rights Reserved.
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice shall be included
13 * in all copies or substantial portions of the Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
16 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21 * OTHER DEALINGS IN THE SOFTWARE.
22 *
23 **************************************************************************/
24
25 #include "util/detect.h"
26 #include "util/u_cpu_detect.h"
27
28 #if DETECT_ARCH_X86 || DETECT_ARCH_X86_64
29
30 #include "util/compiler.h"
31 #include "util/u_debug.h"
32 #include "util/u_pointer.h"
33
34 #include "rtasm_execmem.h"
35 #include "rtasm_x86sse.h"
36
37 #define DISASSEM 0
38 #define X86_TWOB 0x0f
39
40
41 #define DUMP_SSE 0
42
43
x86_print_reg(struct x86_reg reg)44 void x86_print_reg( struct x86_reg reg )
45 {
46 if (reg.mod != mod_REG)
47 debug_printf( "[" );
48
49 switch( reg.file ) {
50 case file_REG32:
51 switch( reg.idx ) {
52 case reg_AX: debug_printf( "EAX" ); break;
53 case reg_CX: debug_printf( "ECX" ); break;
54 case reg_DX: debug_printf( "EDX" ); break;
55 case reg_BX: debug_printf( "EBX" ); break;
56 case reg_SP: debug_printf( "ESP" ); break;
57 case reg_BP: debug_printf( "EBP" ); break;
58 case reg_SI: debug_printf( "ESI" ); break;
59 case reg_DI: debug_printf( "EDI" ); break;
60 }
61 break;
62 case file_MMX:
63 debug_printf( "MMX%u", reg.idx );
64 break;
65 case file_XMM:
66 debug_printf( "XMM%u", reg.idx );
67 break;
68 case file_x87:
69 debug_printf( "fp%u", reg.idx );
70 break;
71 }
72
73 if (reg.mod == mod_DISP8 ||
74 reg.mod == mod_DISP32)
75 debug_printf("+%d", reg.disp);
76
77 if (reg.mod != mod_REG)
78 debug_printf( "]" );
79 }
80
#if DUMP_SSE

/* Instruction-trace helpers, active only when DUMP_SSE is non-zero.
 * Each DUMP* variant logs the current code offset, the instruction
 * mnemonic (derived from the emitting function's name), and its
 * operands.  They all expect a `struct x86_function *p` in scope.
 */

#define DUMP_START() debug_printf( "\n" )
#define DUMP_END() debug_printf( "\n" )

/* Log "<offset> <mnemonic>": strips everything up to and including the
 * first '_' from __func__, so e.g. "sse_mulps" is printed as "mulps".
 */
#define DUMP() do {                             \
   const char *foo = __func__;                  \
   while (*foo && *foo != '_')                  \
      foo++;                                    \
   if  (*foo)                                   \
      foo++;                                    \
   debug_printf( "\n%4tx %14s ", p->csr - p->store, foo );             \
} while (0)

/* Mnemonic plus a single immediate/integer operand. */
#define DUMP_I( I ) do {                        \
   DUMP();                                      \
   debug_printf( "%u", I );                     \
} while( 0 )

/* Mnemonic plus one register operand. */
#define DUMP_R( R0 ) do {                       \
   DUMP();                                      \
   x86_print_reg( R0 );                         \
} while( 0 )

/* Mnemonic plus two register operands. */
#define DUMP_RR( R0, R1 ) do {                  \
   DUMP();                                      \
   x86_print_reg( R0 );                         \
   debug_printf( ", " );                        \
   x86_print_reg( R1 );                         \
} while( 0 )

/* Mnemonic plus register and immediate. */
#define DUMP_RI( R0, I ) do {                   \
   DUMP();                                      \
   x86_print_reg( R0 );                         \
   debug_printf( ", %u", I );                   \
} while( 0 )

/* Mnemonic plus two registers and an immediate. */
#define DUMP_RRI( R0, R1, I ) do {              \
   DUMP();                                      \
   x86_print_reg( R0 );                         \
   debug_printf( ", " );                        \
   x86_print_reg( R1 );                         \
   debug_printf( ", %u", I );                   \
} while( 0 )

#else

/* Tracing disabled: all DUMP* macros compile to nothing. */

#define DUMP_START()
#define DUMP_END()
#define DUMP( )
#define DUMP_I( I )
#define DUMP_R( R0 )
#define DUMP_RR( R0, R1 )
#define DUMP_RI( R0, I )
#define DUMP_RRI( R0, R1, I )

#endif
138
139
/* Allocate or grow the executable code buffer.
 * First call (size == 0) allocates 1KB; later calls double the buffer
 * and copy the already-emitted bytes across.  If executable memory
 * cannot be obtained, emission is redirected into the small
 * error_overflow scratch buffer so callers can keep "emitting" without
 * crashing; once in that state the buffer is simply rewound on each
 * overflow and the generated code must not be used.
 */
static void do_realloc( struct x86_function *p )
{
   if (p->store == p->error_overflow) {
      /* Already in the error state: just rewind the scratch buffer. */
      p->csr = p->store;
   }
   else if (p->size == 0) {
      /* First allocation. */
      p->size = 1024;
      p->store = rtasm_exec_malloc(p->size);
      p->csr = p->store;
   }
   else {
      /* Grow: exec memory has no realloc, so malloc + copy + free. */
      uintptr_t used = pointer_to_uintptr( p->csr ) - pointer_to_uintptr( p->store );
      unsigned char *tmp = p->store;
      p->size *= 2;
      p->store = rtasm_exec_malloc(p->size);

      if (p->store) {
         memcpy(p->store, tmp, used);
         p->csr = p->store + used;
      }
      else {
         p->csr = p->store;
      }

      rtasm_exec_free(tmp);
   }

   if (p->store == NULL) {
      /* Allocation failed: fall back to the overflow scratch buffer. */
      p->store = p->csr = p->error_overflow;
      p->size = sizeof(p->error_overflow);
   }
}
172
173 /* Emit bytes to the instruction stream:
174 */
reserve(struct x86_function * p,int bytes)175 static unsigned char *reserve( struct x86_function *p, int bytes )
176 {
177 if (p->csr - p->store + bytes > (int) p->size)
178 do_realloc(p);
179
180 {
181 unsigned char *csr = p->csr;
182 p->csr += bytes;
183 return csr;
184 }
185 }
186
187
188
/* Emit one signed byte. */
static void emit_1b( struct x86_function *p, char b0 )
{
   char *dst = (char *) reserve(p, 1);
   *dst = b0;
}
194
/* Emit a 32-bit little-endian integer (memcpy avoids unaligned stores). */
static void emit_1i( struct x86_function *p, int i0 )
{
   memcpy(reserve(p, sizeof i0), &i0, sizeof i0);
}
200
emit_1ub(struct x86_function * p,unsigned char b0)201 static void emit_1ub( struct x86_function *p, unsigned char b0 )
202 {
203 unsigned char *csr = reserve(p, 1);
204 *csr++ = b0;
205 }
206
/* Emit two unsigned bytes. */
static void emit_2ub( struct x86_function *p, unsigned char b0, unsigned char b1 )
{
   unsigned char *out = reserve(p, 2);
   out[0] = b0;
   out[1] = b1;
}
213
/* Emit three unsigned bytes. */
static void emit_3ub( struct x86_function *p, unsigned char b0, unsigned char b1, unsigned char b2 )
{
   unsigned char *out = reserve(p, 3);
   out[0] = b0;
   out[1] = b1;
   out[2] = b2;
}
221
222
223 /* Build a modRM byte + possible displacement. No treatment of SIB
224 * indexing. BZZT - no way to encode an absolute address.
225 *
226 * This is the "/r" field in the x86 manuals...
227 */
static void emit_modrm( struct x86_function *p,
			struct x86_reg reg,
			struct x86_reg regmem )
{
   unsigned char val = 0;

   /* The register operand always encodes in the reg field; the
    * addressing mode comes entirely from regmem.
    */
   assert(reg.mod == mod_REG);

   /* TODO: support extended x86-64 registers */
   assert(reg.idx < 8);
   assert(regmem.idx < 8);

   val |= regmem.mod << 6;     	/* mod field */
   val |= reg.idx << 3;		/* reg field */
   val |= regmem.idx;		/* r/m field */

   emit_1ub(p, val);

   /* Oh-oh we've stumbled into the SIB thing.
    */
   if (regmem.file == file_REG32 &&
       regmem.idx == reg_SP &&
       regmem.mod != mod_REG) {
      /* r/m == ESP means "SIB byte follows"; 0x24 = scale 1, no
       * index, base ESP — i.e. plain [ESP] (+ disp below).
       */
      emit_1ub(p, 0x24);		/* simplistic! */
   }

   /* Trailing displacement as dictated by the mod field. */
   switch (regmem.mod) {
   case mod_REG:
   case mod_INDIRECT:
      break;
   case mod_DISP8:
      emit_1b(p, (char) regmem.disp);
      break;
   case mod_DISP32:
      emit_1i(p, regmem.disp);
      break;
   default:
      assert(0);
      break;
   }
}
269
270 /* Emits the "/0".."/7" specialized versions of the modrm ("/r") bytes.
271 */
emit_modrm_noreg(struct x86_function * p,unsigned op,struct x86_reg regmem)272 static void emit_modrm_noreg( struct x86_function *p,
273 unsigned op,
274 struct x86_reg regmem )
275 {
276 struct x86_reg dummy = x86_make_reg(file_REG32, op);
277 emit_modrm(p, dummy, regmem);
278 }
279
280 /* Many x86 instructions have two opcodes to cope with the situations
281 * where the destination is a register or memory reference
282 * respectively. This function selects the correct opcode based on
283 * the arguments presented.
284 */
static void emit_op_modrm( struct x86_function *p,
			   unsigned char op_dst_is_reg,
			   unsigned char op_dst_is_mem,
			   struct x86_reg dst,
			   struct x86_reg src )
{
   switch (dst.mod) {
   case mod_REG:
      /* reg <- r/m form: dst goes in the reg field. */
      emit_1ub(p, op_dst_is_reg);
      emit_modrm(p, dst, src);
      break;
   case mod_INDIRECT:
   case mod_DISP32:
   case mod_DISP8:
      /* r/m <- reg form: operands swap roles in the ModRM byte,
       * and the source must be a plain register.
       */
      assert(src.mod == mod_REG);
      emit_1ub(p, op_dst_is_mem);
      emit_modrm(p, src, dst);
      break;
   default:
      assert(0);
      break;
   }
}
308
309
310
311
312
313
314
315 /* Create and manipulate registers and regmem values:
316 */
x86_make_reg(enum x86_reg_file file,enum x86_reg_name idx)317 struct x86_reg x86_make_reg( enum x86_reg_file file,
318 enum x86_reg_name idx )
319 {
320 struct x86_reg reg;
321
322 reg.file = file;
323 reg.idx = idx;
324 reg.mod = mod_REG;
325 reg.disp = 0;
326
327 return reg;
328 }
329
/* Turn @reg into a memory operand [reg + disp], accumulating the
 * displacement if it is already a memory operand, and pick the
 * tightest encodable addressing mode.
 */
struct x86_reg x86_make_disp( struct x86_reg reg,
			      int disp )
{
   assert(reg.file == file_REG32);

   if (reg.mod == mod_REG)
      reg.disp = disp;
   else
      reg.disp += disp;

   /* mod=00 with r/m=EBP actually encodes [disp32], so EBP must always
    * carry an explicit displacement (DISP8 of 0).
    */
   if (reg.disp == 0 && reg.idx != reg_BP)
      reg.mod = mod_INDIRECT;
   else if (reg.disp <= 127 && reg.disp >= -128)
      reg.mod = mod_DISP8;
   else
      reg.mod = mod_DISP32;

   return reg;
}
349
x86_deref(struct x86_reg reg)350 struct x86_reg x86_deref( struct x86_reg reg )
351 {
352 return x86_make_disp(reg, 0);
353 }
354
x86_get_base_reg(struct x86_reg reg)355 struct x86_reg x86_get_base_reg( struct x86_reg reg )
356 {
357 return x86_make_reg( reg.file, reg.idx );
358 }
359
/* Labels are plain byte offsets into the code buffer. */
int x86_get_label( struct x86_function *p )
{
   return p->csr - p->store;
}
364
365
366
367 /***********************************************************************
368 * x86 instructions
369 */
370
371
x64_rexw(struct x86_function * p)372 void x64_rexw(struct x86_function *p)
373 {
374 if(x86_target(p) != X86_32)
375 emit_1ub(p, 0x48);
376 }
377
/* Conditional jump to an already-known (backward) label.  Picks the
 * 2-byte rel8 form when the offset fits, else the 6-byte 0f 8x rel32
 * form; the offset is relative to the end of the instruction, hence
 * the +2 / +6 adjustments.
 */
void x86_jcc( struct x86_function *p,
	      enum x86_cc cc,
	      int label )
{
   int offset = label - (x86_get_label(p) + 2);
   DUMP_I(cc);

   if (offset < 0) {
      /*assert(p->csr - p->store > -offset);*/
      if (p->csr - p->store <= -offset) {
         /* probably out of memory (using the error_overflow buffer) */
         return;
      }
   }

   if (offset <= 127 && offset >= -128) {
      emit_1ub(p, 0x70 + cc);
      emit_1b(p, (char) offset);
   }
   else {
      /* Near form is 4 bytes longer: recompute the offset. */
      offset = label - (x86_get_label(p) + 6);
      emit_2ub(p, 0x0f, 0x80 + cc);
      emit_1i(p, offset);
   }
}
403
404 /* Always use a 32bit offset for forward jumps:
405 */
/* Forward conditional jump: emits the 32-bit form with a zero
 * placeholder offset and returns the label to pass to
 * x86_fixup_fwd_jump() once the target is known.
 */
int x86_jcc_forward( struct x86_function *p,
		     enum x86_cc cc )
{
   DUMP_I(cc);
   emit_2ub(p, 0x0f, 0x80 + cc);
   emit_1i(p, 0);
   return x86_get_label(p);
}

/* Forward unconditional jump (e9 rel32) with placeholder offset;
 * returns the fixup label.
 */
int x86_jmp_forward( struct x86_function *p)
{
   DUMP();
   emit_1ub(p, 0xe9);
   emit_1i(p, 0);
   return x86_get_label(p);
}

/* Forward call (e8 rel32) with placeholder offset; returns the
 * fixup label.
 */
int x86_call_forward( struct x86_function *p)
{
   DUMP();

   emit_1ub(p, 0xe8);
   emit_1i(p, 0);
   return x86_get_label(p);
}
431
432 /* Fixup offset from forward jump:
433 */
/* Patch a forward jump/call: @fixup is the label returned by one of
 * the *_forward() emitters, i.e. the offset just past the 4-byte
 * placeholder, so the patched rel32 lives at fixup-4 and targets the
 * current position.
 */
void x86_fixup_fwd_jump( struct x86_function *p,
			 int fixup )
{
   int lblfixed = x86_get_label(p) - fixup;
   memcpy(p->store + fixup - 4, &lblfixed, sizeof(lblfixed));
}
440
/* Unconditional jump (e9 rel32) to a known label; -4 accounts for the
 * rel32 field itself, measured from the end of the instruction.
 */
void x86_jmp( struct x86_function *p, int label)
{
   DUMP_I( label );
   emit_1ub(p, 0xe9);
   emit_1i(p, label - x86_get_label(p) - 4);
}
447
/* Indirect call through a register or memory operand (ff /2). */
void x86_call( struct x86_function *p, struct x86_reg reg)
{
   DUMP_R( reg );
   emit_1ub(p, 0xff);
   emit_modrm_noreg(p, 2, reg);
}
454
455
/* mov r32, imm32 — short-form encoding b8+rd id; register dst only. */
void x86_mov_reg_imm( struct x86_function *p, struct x86_reg dst, int imm )
{
   DUMP_RI( dst, imm );
   assert(dst.file == file_REG32);
   assert(dst.mod == mod_REG);
   emit_1ub(p, 0xb8 + dst.idx);
   emit_1i(p, imm);
}
464
x86_mov_imm(struct x86_function * p,struct x86_reg dst,int imm)465 void x86_mov_imm( struct x86_function *p, struct x86_reg dst, int imm )
466 {
467 DUMP_RI( dst, imm );
468 if(dst.mod == mod_REG)
469 x86_mov_reg_imm(p, dst, imm);
470 else
471 {
472 emit_1ub(p, 0xc7);
473 emit_modrm_noreg(p, 0, dst);
474 emit_1i(p, imm);
475 }
476 }
477
x86_mov16_imm(struct x86_function * p,struct x86_reg dst,uint16_t imm)478 void x86_mov16_imm( struct x86_function *p, struct x86_reg dst, uint16_t imm )
479 {
480 DUMP_RI( dst, imm );
481 emit_1ub(p, 0x66);
482 if(dst.mod == mod_REG)
483 {
484 emit_1ub(p, 0xb8 + dst.idx);
485 emit_2ub(p, imm & 0xff, imm >> 8);
486 }
487 else
488 {
489 emit_1ub(p, 0xc7);
490 emit_modrm_noreg(p, 0, dst);
491 emit_2ub(p, imm & 0xff, imm >> 8);
492 }
493 }
494
x86_mov8_imm(struct x86_function * p,struct x86_reg dst,uint8_t imm)495 void x86_mov8_imm( struct x86_function *p, struct x86_reg dst, uint8_t imm )
496 {
497 DUMP_RI( dst, imm );
498 if(dst.mod == mod_REG)
499 {
500 emit_1ub(p, 0xb0 + dst.idx);
501 emit_1ub(p, imm);
502 }
503 else
504 {
505 emit_1ub(p, 0xc6);
506 emit_modrm_noreg(p, 0, dst);
507 emit_1ub(p, imm);
508 }
509 }
510
511 /**
512 * Immediate group 1 instructions.
513 */
static inline void
x86_group1_imm( struct x86_function *p,
                unsigned op, struct x86_reg dst, int imm )
{
   assert(dst.file == file_REG32);
   assert(dst.mod == mod_REG);
   /* 83 /op ib sign-extends an 8-bit immediate; 81 /op id carries a
    * full 32-bit immediate.  @op selects the operation (0=add, 1=or,
    * 4=and, 5=sub, 6=xor, 7=cmp).
    */
   if(-0x80 <= imm && imm < 0x80) {
      emit_1ub(p, 0x83);
      emit_modrm_noreg(p, op, dst);
      emit_1b(p, (char)imm);
   }
   else {
      emit_1ub(p, 0x81);
      emit_modrm_noreg(p, op, dst);
      emit_1i(p, imm);
   }
}
531
/* add r32, imm — group-1 /0 */
void x86_add_imm( struct x86_function *p, struct x86_reg dst, int imm )
{
   DUMP_RI( dst, imm );
   x86_group1_imm(p, 0, dst, imm);
}

/* or r32, imm — group-1 /1 */
void x86_or_imm( struct x86_function *p, struct x86_reg dst, int imm )
{
   DUMP_RI( dst, imm );
   x86_group1_imm(p, 1, dst, imm);
}

/* and r32, imm — group-1 /4 */
void x86_and_imm( struct x86_function *p, struct x86_reg dst, int imm )
{
   DUMP_RI( dst, imm );
   x86_group1_imm(p, 4, dst, imm);
}

/* sub r32, imm — group-1 /5 */
void x86_sub_imm( struct x86_function *p, struct x86_reg dst, int imm )
{
   DUMP_RI( dst, imm );
   x86_group1_imm(p, 5, dst, imm);
}

/* xor r32, imm — group-1 /6 */
void x86_xor_imm( struct x86_function *p, struct x86_reg dst, int imm )
{
   DUMP_RI( dst, imm );
   x86_group1_imm(p, 6, dst, imm);
}

/* cmp r32, imm — group-1 /7 */
void x86_cmp_imm( struct x86_function *p, struct x86_reg dst, int imm )
{
   DUMP_RI( dst, imm );
   x86_group1_imm(p, 7, dst, imm);
}
567
568
x86_push(struct x86_function * p,struct x86_reg reg)569 void x86_push( struct x86_function *p,
570 struct x86_reg reg )
571 {
572 DUMP_R( reg );
573 if (reg.mod == mod_REG)
574 emit_1ub(p, 0x50 + reg.idx);
575 else
576 {
577 emit_1ub(p, 0xff);
578 emit_modrm_noreg(p, 6, reg);
579 }
580
581
582 p->stack_offset += sizeof(void*);
583 }
584
/* push imm32 (68 id); tracks stack depth like x86_push(). */
void x86_push_imm32( struct x86_function *p,
                     int imm32 )
{
   DUMP_I( imm32 );
   emit_1ub(p, 0x68);
   emit_1i(p,  imm32);

   p->stack_offset += sizeof(void*);
}
594
595
/* pop r32 (58+rd) — register destinations only. */
void x86_pop( struct x86_function *p,
              struct x86_reg reg )
{
   DUMP_R( reg );
   assert(reg.mod == mod_REG);
   emit_1ub(p, 0x58 + reg.idx);
   p->stack_offset -= sizeof(void*);
}
604
x86_inc(struct x86_function * p,struct x86_reg reg)605 void x86_inc( struct x86_function *p,
606 struct x86_reg reg )
607 {
608 DUMP_R( reg );
609 if(x86_target(p) == X86_32 && reg.mod == mod_REG)
610 {
611 emit_1ub(p, 0x40 + reg.idx);
612 return;
613 }
614 emit_1ub(p, 0xff);
615 emit_modrm_noreg(p, 0, reg);
616 }
617
x86_dec(struct x86_function * p,struct x86_reg reg)618 void x86_dec( struct x86_function *p,
619 struct x86_reg reg )
620 {
621 DUMP_R( reg );
622 if(x86_target(p) == X86_32 && reg.mod == mod_REG)
623 {
624 emit_1ub(p, 0x48 + reg.idx);
625 return;
626 }
627 emit_1ub(p, 0xff);
628 emit_modrm_noreg(p, 1, reg);
629 }
630
/* ret (c3) — asserts pushes and pops are balanced. */
void x86_ret( struct x86_function *p )
{
   DUMP();
   assert(p->stack_offset == 0);
   emit_1ub(p, 0xc3);
}

/* ret imm16 (c2 iw) — return and pop @imm bytes of arguments. */
void x86_retw( struct x86_function *p, unsigned short imm )
{
   DUMP();
   emit_3ub(p, 0xc2, imm & 0xff, (imm >> 8) & 0xff);
}

/* sahf (9e) — load flags from AH. */
void x86_sahf( struct x86_function *p )
{
   DUMP();
   emit_1ub(p, 0x9e);
}
649
/* mov r/m32, r32 / mov r32, r/m32 (89 / 8b).  Extended x86-64
 * registers (idx >= 8) are handled only for the reg-to-reg case via an
 * ad-hoc REX prefix.
 */
void x86_mov( struct x86_function *p,
              struct x86_reg dst,
              struct x86_reg src )
{
   DUMP_RR( dst, src );
   /* special hack for reading arguments until we support x86-64 registers everywhere */
   if(src.mod == mod_REG && dst.mod == mod_REG && (src.idx >= 8 || dst.idx >= 8))
   {
      uint8_t rex = 0x40;
      if(dst.idx >= 8)
      {
         rex |= 4;      /* REX.R — but see note: dst ends up in r/m here */
         dst.idx -= 8;
      }
      if(src.idx >= 8)
      {
         rex |= 1;      /* REX.B */
         src.idx -= 8;
      }
      emit_1ub(p, rex);
   }
   emit_op_modrm( p, 0x8b, 0x89, dst, src );
}
673
/* 16-bit mov: 0x66 operand-size prefix + the 32-bit mov opcodes. */
void x86_mov16( struct x86_function *p,
                struct x86_reg dst,
                struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_1ub(p, 0x66);
   emit_op_modrm( p, 0x8b, 0x89, dst, src );
}

/* 8-bit mov (8a / 88). */
void x86_mov8( struct x86_function *p,
               struct x86_reg dst,
               struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_op_modrm( p, 0x8a, 0x88, dst, src );
}
690
/* 64-bit mov: REX.W (0x48) plus the 32-bit mov opcodes; extends the
 * prefix with REX.R/REX.B for registers 8-15 in the reg-to-reg case.
 * Only valid when targeting x86-64.
 */
void x64_mov64( struct x86_function *p,
                struct x86_reg dst,
                struct x86_reg src )
{
   uint8_t rex = 0x48;
   DUMP_RR( dst, src );
   assert(x86_target(p) != X86_32);

   /* special hack for reading arguments until we support x86-64 registers everywhere */
   if(src.mod == mod_REG && dst.mod == mod_REG && (src.idx >= 8 || dst.idx >= 8))
   {
      if(dst.idx >= 8)
      {
         rex |= 4;
         dst.idx -= 8;
      }
      if(src.idx >= 8)
      {
         rex |= 1;
         src.idx -= 8;
      }
   }
   emit_1ub(p, rex);
   emit_op_modrm( p, 0x8b, 0x89, dst, src );
}
716
/* movzx r32, r/m8 (0f b6) — zero-extending 8-bit load. */
void x86_movzx8(struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub(p, 0x0f, 0xb6);
   emit_modrm(p, dst, src);
}

/* movzx r32, r/m16 (0f b7) — zero-extending 16-bit load. */
void x86_movzx16(struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub(p, 0x0f, 0xb7);
   emit_modrm(p, dst, src);
}

/* cmovcc r32, r/m32 (0f 40+cc) — conditional move. */
void x86_cmovcc( struct x86_function *p,
                 struct x86_reg dst,
                 struct x86_reg src,
                 enum x86_cc cc)
{
   DUMP_RRI( dst, src, cc );
   emit_2ub( p, 0x0f, 0x40 + cc );
   emit_modrm( p, dst, src );
}

/* xor (33 / 31). */
void x86_xor( struct x86_function *p,
              struct x86_reg dst,
              struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_op_modrm( p, 0x33, 0x31, dst, src );
}

/* cmp (3b / 39). */
void x86_cmp( struct x86_function *p,
              struct x86_reg dst,
              struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_op_modrm( p, 0x3b, 0x39, dst, src );
}

/* lea r32, m (8d) — compute effective address. */
void x86_lea( struct x86_function *p,
              struct x86_reg dst,
              struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_1ub(p, 0x8d);
   emit_modrm( p, dst, src );
}

/* test (85) — AND operands, set flags, discard result. */
void x86_test( struct x86_function *p,
               struct x86_reg dst,
               struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_1ub(p, 0x85);
   emit_modrm( p, dst, src );
}

/* add (03 / 01). */
void x86_add( struct x86_function *p,
              struct x86_reg dst,
              struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_op_modrm(p, 0x03, 0x01, dst, src );
}
782
783 /* Calculate EAX * src, results in EDX:EAX.
784 */
/* mul r/m32 (f7 /4) — unsigned EAX * src into EDX:EAX. */
void x86_mul( struct x86_function *p,
              struct x86_reg src )
{
   DUMP_R(  src );
   emit_1ub(p, 0xf7);
   emit_modrm_noreg(p, 4, src );
}
792
793
/* imul r32, r/m32 (0f af) — two-operand signed multiply. */
void x86_imul( struct x86_function *p,
               struct x86_reg dst,
               struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub(p, X86_TWOB, 0xAF);
   emit_modrm(p, dst, src);
}


/* sub (2b / 29). */
void x86_sub( struct x86_function *p,
              struct x86_reg dst,
              struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_op_modrm(p, 0x2b, 0x29, dst, src );
}

/* or (0b / 09). */
void x86_or( struct x86_function *p,
             struct x86_reg dst,
             struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_op_modrm( p, 0x0b, 0x09, dst, src );
}

/* and (23 / 21). */
void x86_and( struct x86_function *p,
              struct x86_reg dst,
              struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_op_modrm( p, 0x23, 0x21, dst, src );
}
827
x86_div(struct x86_function * p,struct x86_reg src)828 void x86_div( struct x86_function *p,
829 struct x86_reg src )
830 {
831 assert(src.file == file_REG32 && src.mod == mod_REG);
832 emit_op_modrm(p, 0xf7, 0, x86_make_reg(file_REG32, 6), src);
833 }
834
/* bswap r32 (0f c8+rd) — reverse byte order; register operands only. */
void x86_bswap( struct x86_function *p, struct x86_reg reg )
{
   DUMP_R(reg);
   assert(reg.file == file_REG32);
   assert(reg.mod == mod_REG);
   emit_2ub(p, 0x0f, 0xc8 + reg.idx);
}
842
/* shr r/m32, imm — d1 /5 for a shift of one, c1 /5 ib otherwise. */
void x86_shr_imm( struct x86_function *p, struct x86_reg reg, unsigned imm )
{
   DUMP_RI(reg, imm);
   emit_1ub(p, imm == 1 ? 0xd1 : 0xc1);
   emit_modrm_noreg(p, 5, reg);
   if (imm != 1)
      emit_1ub(p, imm);
}
858
/* sar r/m32, imm — d1 /7 for a shift of one, c1 /7 ib otherwise. */
void x86_sar_imm( struct x86_function *p, struct x86_reg reg, unsigned imm )
{
   DUMP_RI(reg, imm);
   emit_1ub(p, imm == 1 ? 0xd1 : 0xc1);
   emit_modrm_noreg(p, 7, reg);
   if (imm != 1)
      emit_1ub(p, imm);
}
874
/* shl r/m32, imm — d1 /4 for a shift of one, c1 /4 ib otherwise. */
void x86_shl_imm( struct x86_function *p, struct x86_reg reg, unsigned imm )
{
   DUMP_RI(reg, imm);
   emit_1ub(p, imm == 1 ? 0xd1 : 0xc1);
   emit_modrm_noreg(p, 4, reg);
   if (imm != 1)
      emit_1ub(p, imm);
}
890
891
892 /***********************************************************************
893 * SSE instructions
894 */
895
/* prefetchnta m8 (0f 18 /0) — prefetch minimizing cache pollution. */
void sse_prefetchnta( struct x86_function *p, struct x86_reg ptr)
{
   DUMP_R( ptr );
   assert(ptr.mod != mod_REG);
   emit_2ub(p, 0x0f, 0x18);
   emit_modrm_noreg(p, 0, ptr);
}

/* prefetcht0 m8 (0f 18 /1) — prefetch into all cache levels. */
void sse_prefetch0( struct x86_function *p, struct x86_reg ptr)
{
   DUMP_R( ptr );
   assert(ptr.mod != mod_REG);
   emit_2ub(p, 0x0f, 0x18);
   emit_modrm_noreg(p, 1, ptr);
}

/* prefetcht1 m8 (0f 18 /2) — prefetch into L2 and up. */
void sse_prefetch1( struct x86_function *p, struct x86_reg ptr)
{
   DUMP_R( ptr );
   assert(ptr.mod != mod_REG);
   emit_2ub(p, 0x0f, 0x18);
   emit_modrm_noreg(p, 2, ptr);
}

/* movntps m128, xmm (0f 2b) — non-temporal store; memory dst only. */
void sse_movntps( struct x86_function *p,
                  struct x86_reg dst,
                  struct x86_reg src)
{
   DUMP_RR( dst, src );

   assert(dst.mod != mod_REG);
   assert(src.mod == mod_REG);
   emit_2ub(p, 0x0f, 0x2b);
   emit_modrm(p, src, dst);
}
931
932
933
934
/* movss (f3 0f 10/11) — scalar single-precision move. */
void sse_movss( struct x86_function *p,
                struct x86_reg dst,
                struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub(p, 0xF3, X86_TWOB);
   emit_op_modrm( p, 0x10, 0x11, dst, src );
}

/* movaps (0f 28/29) — aligned packed-single move. */
void sse_movaps( struct x86_function *p,
                 struct x86_reg dst,
                 struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_1ub(p, X86_TWOB);
   emit_op_modrm( p, 0x28, 0x29, dst, src );
}

/* movups (0f 10/11) — unaligned packed-single move. */
void sse_movups( struct x86_function *p,
                 struct x86_reg dst,
                 struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_1ub(p, X86_TWOB);
   emit_op_modrm( p, 0x10, 0x11, dst, src );
}

/* movhps (0f 16/17) — move two floats to/from the high quadword;
 * one operand must be memory (the reg-reg encoding is movlhps).
 */
void sse_movhps( struct x86_function *p,
                 struct x86_reg dst,
                 struct x86_reg src )
{
   DUMP_RR( dst, src );
   assert(dst.mod != mod_REG || src.mod != mod_REG);
   emit_1ub(p, X86_TWOB);
   emit_op_modrm( p, 0x16, 0x17, dst, src ); /* cf movlhps */
}

/* movlps (0f 12/13) — move two floats to/from the low quadword;
 * one operand must be memory (the reg-reg encoding is movhlps).
 */
void sse_movlps( struct x86_function *p,
                 struct x86_reg dst,
                 struct x86_reg src )
{
   DUMP_RR( dst, src );
   assert(dst.mod != mod_REG || src.mod != mod_REG);
   emit_1ub(p, X86_TWOB);
   emit_op_modrm( p, 0x12, 0x13, dst, src ); /* cf movhlps */
}
981
/* maxps (0f 5f) — packed single-precision maximum. */
void sse_maxps( struct x86_function *p,
                struct x86_reg dst,
                struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub(p, X86_TWOB, 0x5F);
   emit_modrm( p, dst, src );
}

/* maxss (f3 0f 5f) — scalar single-precision maximum. */
void sse_maxss( struct x86_function *p,
                struct x86_reg dst,
                struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0xF3, X86_TWOB, 0x5F);
   emit_modrm( p, dst, src );
}

/* divss (f3 0f 5e) — scalar single-precision divide. */
void sse_divss( struct x86_function *p,
                struct x86_reg dst,
                struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0xF3, X86_TWOB, 0x5E);
   emit_modrm( p, dst, src );
}

/* minps (0f 5d) — packed single-precision minimum. */
void sse_minps( struct x86_function *p,
                struct x86_reg dst,
                struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub(p, X86_TWOB, 0x5D);
   emit_modrm( p, dst, src );
}

/* subps (0f 5c) — packed single-precision subtract. */
void sse_subps( struct x86_function *p,
                struct x86_reg dst,
                struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub(p, X86_TWOB, 0x5C);
   emit_modrm( p, dst, src );
}

/* mulps (0f 59) — packed single-precision multiply. */
void sse_mulps( struct x86_function *p,
                struct x86_reg dst,
                struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub(p, X86_TWOB, 0x59);
   emit_modrm( p, dst, src );
}

/* mulss (f3 0f 59) — scalar single-precision multiply. */
void sse_mulss( struct x86_function *p,
                struct x86_reg dst,
                struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0xF3, X86_TWOB, 0x59);
   emit_modrm( p, dst, src );
}

/* addps (0f 58) — packed single-precision add. */
void sse_addps( struct x86_function *p,
                struct x86_reg dst,
                struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub(p, X86_TWOB, 0x58);
   emit_modrm( p, dst, src );
}

/* addss (f3 0f 58) — scalar single-precision add. */
void sse_addss( struct x86_function *p,
                struct x86_reg dst,
                struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0xF3, X86_TWOB, 0x58);
   emit_modrm( p, dst, src );
}
1062
/* andnps (0f 55) — (~dst) & src, packed single bitwise. */
void sse_andnps( struct x86_function *p,
                 struct x86_reg dst,
                 struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub(p, X86_TWOB, 0x55);
   emit_modrm( p, dst, src );
}

/* andps (0f 54) — packed single bitwise AND. */
void sse_andps( struct x86_function *p,
                struct x86_reg dst,
                struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub(p, X86_TWOB, 0x54);
   emit_modrm( p, dst, src );
}

/* rsqrtps (0f 52) — packed approximate reciprocal square root. */
void sse_rsqrtps( struct x86_function *p,
                  struct x86_reg dst,
                  struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub(p, X86_TWOB, 0x52);
   emit_modrm( p, dst, src );
}

/* rsqrtss (f3 0f 52) — scalar approximate reciprocal square root. */
void sse_rsqrtss( struct x86_function *p,
                  struct x86_reg dst,
                  struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0xF3, X86_TWOB, 0x52);
   emit_modrm( p, dst, src );

}

/* movhlps (0f 12 reg,reg) — src high quadword into dst low quadword. */
void sse_movhlps( struct x86_function *p,
                  struct x86_reg dst,
                  struct x86_reg src )
{
   DUMP_RR( dst, src );
   assert(dst.mod == mod_REG && src.mod == mod_REG);
   emit_2ub(p, X86_TWOB, 0x12);
   emit_modrm( p, dst, src );
}

/* movlhps (0f 16 reg,reg) — src low quadword into dst high quadword. */
void sse_movlhps( struct x86_function *p,
                  struct x86_reg dst,
                  struct x86_reg src )
{
   DUMP_RR( dst, src );
   assert(dst.mod == mod_REG && src.mod == mod_REG);
   emit_2ub(p, X86_TWOB, 0x16);
   emit_modrm( p, dst, src );
}

/* orps (0f 56) — packed single bitwise OR. */
void sse_orps( struct x86_function *p,
               struct x86_reg dst,
               struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub(p, X86_TWOB, 0x56);
   emit_modrm( p, dst, src );
}

/* xorps (0f 57) — packed single bitwise XOR. */
void sse_xorps( struct x86_function *p,
                struct x86_reg dst,
                struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub(p, X86_TWOB, 0x57);
   emit_modrm( p, dst, src );
}
1137
/* cvtps2pi (0f 2d) — convert two packed floats to packed dwords in an
 * MMX register; flags that an emms will be needed before returning to
 * FPU code.
 */
void sse_cvtps2pi( struct x86_function *p,
                   struct x86_reg dst,
                   struct x86_reg src )
{
   DUMP_RR( dst, src );
   assert(dst.file == file_MMX &&
          (src.file == file_XMM || src.mod != mod_REG));

   p->need_emms = 1;

   emit_2ub(p, X86_TWOB, 0x2d);
   emit_modrm( p, dst, src );
}

/* cvtdq2ps (0f 5b) — convert packed dwords to packed floats (SSE2). */
void sse2_cvtdq2ps( struct x86_function *p,
                    struct x86_reg dst,
                    struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub(p, X86_TWOB, 0x5b);
   emit_modrm( p, dst, src );
}
1160
1161
/* Shufps can also be used to implement a reduced swizzle when dest ==
 * arg0.
 */
/* SHUFPS (0F C6 /r ib): shuffle packed floats by the 2-bit-per-lane
 * selector \p shuf.
 */
void sse_shufps( struct x86_function *p,
                 struct x86_reg dst,
                 struct x86_reg src,
                 unsigned char shuf)
{
   DUMP_RRI( dst, src, shuf );
   emit_2ub(p, X86_TWOB, 0xC6);
   emit_modrm(p, dst, src);
   emit_1ub(p, shuf);
}

/* UNPCKHPS (0F 15): interleave the high halves of dst and src. */
void sse_unpckhps( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub( p, X86_TWOB, 0x15 );
   emit_modrm( p, dst, src );
}

/* UNPCKLPS (0F 14): interleave the low halves of dst and src. */
void sse_unpcklps( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub( p, X86_TWOB, 0x14 );
   emit_modrm( p, dst, src );
}

/* CMPPS (0F C2 /r ib): packed float compare; \p cc selects the predicate
 * (eq/lt/le/unord/neq/nlt/nle/ord) encoded in the trailing immediate.
 */
void sse_cmpps( struct x86_function *p,
                struct x86_reg dst,
                struct x86_reg src,
                enum sse_cc cc)
{
   DUMP_RRI( dst, src, cc );
   emit_2ub(p, X86_TWOB, 0xC2);
   emit_modrm(p, dst, src);
   emit_1ub(p, cc);
}

/* PMOVMSKB (66 0F D7): gather the sign bits of 16 bytes into a GP register. */
void sse_pmovmskb( struct x86_function *p,
                   struct x86_reg dst,
                   struct x86_reg src)
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0x66, X86_TWOB, 0xD7);
   emit_modrm(p, dst, src);
}

/* MOVMSKPS (0F 50): gather the sign bits of 4 floats into a GP register. */
void sse_movmskps( struct x86_function *p,
                   struct x86_reg dst,
                   struct x86_reg src)
{
   DUMP_RR( dst, src );
   emit_2ub(p, X86_TWOB, 0x50);
   emit_modrm(p, dst, src);
}
1218
/***********************************************************************
 * SSE2 instructions
 */

/* MOVD: 32-bit move between an XMM register and a GP register or memory.
 * When the destination is a GP register the store form (66 0F 7E) must be
 * used, with the ModRM operands swapped so the XMM source sits in the
 * reg field; otherwise emit_op_modrm picks load (6E) or store (7E) form.
 */
void sse2_movd( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR(dst, src);
   emit_2ub(p, 0x66, 0x0f);
   if(dst.mod == mod_REG && dst.file == file_REG32)
   {
      emit_1ub(p, 0x7e);
      emit_modrm(p, src, dst);
   }
   else
   {
      emit_op_modrm(p, 0x6e, 0x7e, dst, src);
   }
}

/* MOVQ: 64-bit move.  Register destination uses the load form F3 0F 7E;
 * memory destination uses the store form 66 0F D6 (operands swapped so the
 * XMM source is in the ModRM reg field).
 */
void sse2_movq( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR(dst, src);
   switch (dst.mod) {
   case mod_REG:
      emit_3ub(p, 0xf3, 0x0f, 0x7e);
      emit_modrm(p, dst, src);
      break;
   case mod_INDIRECT:
   case mod_DISP32:
   case mod_DISP8:
      assert(src.mod == mod_REG);
      emit_3ub(p, 0x66, 0x0f, 0xd6);
      emit_modrm(p, src, dst);
      break;
   default:
      assert(0);
      break;
   }
}

/* MOVDQU (F3 0F 6F/7F): unaligned 128-bit integer move. */
void sse2_movdqu( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR(dst, src);
   emit_2ub(p, 0xf3, 0x0f);
   emit_op_modrm(p, 0x6f, 0x7f, dst, src);
}

/* MOVDQA (66 0F 6F/7F): aligned 128-bit integer move. */
void sse2_movdqa( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR(dst, src);
   emit_2ub(p, 0x66, 0x0f);
   emit_op_modrm(p, 0x6f, 0x7f, dst, src);
}

/* MOVSD (F2 0F 10/11): scalar double move. */
void sse2_movsd( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR(dst, src);
   emit_2ub(p, 0xf2, 0x0f);
   emit_op_modrm(p, 0x10, 0x11, dst, src);
}

/* MOVUPD (66 0F 10/11): unaligned packed double move. */
void sse2_movupd( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR(dst, src);
   emit_2ub(p, 0x66, 0x0f);
   emit_op_modrm(p, 0x10, 0x11, dst, src);
}

/* MOVAPD (66 0F 28/29): aligned packed double move. */
void sse2_movapd( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR(dst, src);
   emit_2ub(p, 0x66, 0x0f);
   emit_op_modrm(p, 0x28, 0x29, dst, src);
}
1293
/**
 * Perform a reduced swizzle:
 */
/* PSHUFD (66 0F 70 /r ib): shuffle packed dwords by immediate selector. */
void sse2_pshufd( struct x86_function *p,
                  struct x86_reg dst,
                  struct x86_reg src,
                  unsigned char shuf)
{
   DUMP_RRI( dst, src, shuf );
   emit_3ub(p, 0x66, X86_TWOB, 0x70);
   emit_modrm(p, dst, src);
   emit_1ub(p, shuf);
}

/* PSHUFLW (F2 0F 70 /r ib): shuffle the four low words by immediate. */
void sse2_pshuflw( struct x86_function *p,
                   struct x86_reg dst,
                   struct x86_reg src,
                   unsigned char shuf)
{
   DUMP_RRI( dst, src, shuf );
   emit_3ub(p, 0xf2, X86_TWOB, 0x70);
   emit_modrm(p, dst, src);
   emit_1ub(p, shuf);
}

/* PSHUFHW (F3 0F 70 /r ib): shuffle the four high words by immediate. */
void sse2_pshufhw( struct x86_function *p,
                   struct x86_reg dst,
                   struct x86_reg src,
                   unsigned char shuf)
{
   DUMP_RRI( dst, src, shuf );
   emit_3ub(p, 0xf3, X86_TWOB, 0x70);
   emit_modrm(p, dst, src);
   emit_1ub(p, shuf);
}

/* CVTTPS2DQ (F3 0F 5B): packed float -> dword, truncating. */
void sse2_cvttps2dq( struct x86_function *p,
                     struct x86_reg dst,
                     struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub( p, 0xF3, X86_TWOB, 0x5B );
   emit_modrm( p, dst, src );
}

/* CVTPS2DQ (66 0F 5B): packed float -> dword, using the current rounding mode. */
void sse2_cvtps2dq( struct x86_function *p,
                    struct x86_reg dst,
                    struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0x66, X86_TWOB, 0x5B);
   emit_modrm( p, dst, src );
}
1347
/* CVTSD2SS (F2 0F 5A): scalar double -> scalar float. */
void sse2_cvtsd2ss( struct x86_function *p,
                    struct x86_reg dst,
                    struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0xf2, 0x0f, 0x5a);
   emit_modrm( p, dst, src );
}

/* CVTPD2PS (66 0F 5A): two packed doubles -> two packed floats. */
void sse2_cvtpd2ps( struct x86_function *p,
                    struct x86_reg dst,
                    struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0x66, 0x0f, 0x5a);
   emit_modrm( p, dst, src );
}

/* PACKSSDW (66 0F 6B): pack dwords to words with signed saturation. */
void sse2_packssdw( struct x86_function *p,
                    struct x86_reg dst,
                    struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0x66, X86_TWOB, 0x6B);
   emit_modrm( p, dst, src );
}

/* PACKSSWB (66 0F 63): pack words to bytes with signed saturation. */
void sse2_packsswb( struct x86_function *p,
                    struct x86_reg dst,
                    struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0x66, X86_TWOB, 0x63);
   emit_modrm( p, dst, src );
}

/* PACKUSWB (66 0F 67): pack words to bytes with unsigned saturation. */
void sse2_packuswb( struct x86_function *p,
                    struct x86_reg dst,
                    struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0x66, X86_TWOB, 0x67);
   emit_modrm( p, dst, src );
}

/* PUNPCKLBW (66 0F 60): interleave low bytes of dst and src. */
void sse2_punpcklbw( struct x86_function *p,
                     struct x86_reg dst,
                     struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0x66, X86_TWOB, 0x60);
   emit_modrm( p, dst, src );
}

/* PUNPCKLWD (66 0F 61): interleave low words of dst and src. */
void sse2_punpcklwd( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0x66, 0x0f, 0x61);
   emit_modrm( p, dst, src );
}

/* PUNPCKLDQ (66 0F 62): interleave low dwords of dst and src. */
void sse2_punpckldq( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0x66, 0x0f, 0x62);
   emit_modrm( p, dst, src );
}

/* PUNPCKLQDQ (66 0F 6C): interleave low quadwords of dst and src. */
void sse2_punpcklqdq( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0x66, 0x0f, 0x6c);
   emit_modrm( p, dst, src );
}
1422
/* Immediate-count SIMD shifts share three opcodes (0F 71 = word,
 * 0F 72 = dword, 0F 73 = qword); the operation is selected by the ModRM
 * reg-field extension: /6 = shift left, /2 = logical right, /4 = arithmetic
 * right.  The shift count is the trailing immediate byte.
 */

/* PSLLW xmm, imm8 (66 0F 71 /6 ib). */
void sse2_psllw_imm( struct x86_function *p, struct x86_reg dst, unsigned imm )
{
   DUMP_RI(dst, imm);
   emit_3ub(p, 0x66, 0x0f, 0x71);
   emit_modrm_noreg(p, 6, dst);
   emit_1ub(p, imm);
}

/* PSLLD xmm, imm8 (66 0F 72 /6 ib). */
void sse2_pslld_imm( struct x86_function *p, struct x86_reg dst, unsigned imm )
{
   DUMP_RI(dst, imm);
   emit_3ub(p, 0x66, 0x0f, 0x72);
   emit_modrm_noreg(p, 6, dst);
   emit_1ub(p, imm);
}

/* PSLLQ xmm, imm8 (66 0F 73 /6 ib). */
void sse2_psllq_imm( struct x86_function *p, struct x86_reg dst, unsigned imm )
{
   DUMP_RI(dst, imm);
   emit_3ub(p, 0x66, 0x0f, 0x73);
   emit_modrm_noreg(p, 6, dst);
   emit_1ub(p, imm);
}

/* PSRLW xmm, imm8 (66 0F 71 /2 ib). */
void sse2_psrlw_imm( struct x86_function *p, struct x86_reg dst, unsigned imm )
{
   DUMP_RI(dst, imm);
   emit_3ub(p, 0x66, 0x0f, 0x71);
   emit_modrm_noreg(p, 2, dst);
   emit_1ub(p, imm);
}

/* PSRLD xmm, imm8 (66 0F 72 /2 ib). */
void sse2_psrld_imm( struct x86_function *p, struct x86_reg dst, unsigned imm )
{
   DUMP_RI(dst, imm);
   emit_3ub(p, 0x66, 0x0f, 0x72);
   emit_modrm_noreg(p, 2, dst);
   emit_1ub(p, imm);
}

/* PSRLQ xmm, imm8 (66 0F 73 /2 ib). */
void sse2_psrlq_imm( struct x86_function *p, struct x86_reg dst, unsigned imm )
{
   DUMP_RI(dst, imm);
   emit_3ub(p, 0x66, 0x0f, 0x73);
   emit_modrm_noreg(p, 2, dst);
   emit_1ub(p, imm);
}

/* PSRAW xmm, imm8 (66 0F 71 /4 ib). */
void sse2_psraw_imm( struct x86_function *p, struct x86_reg dst, unsigned imm )
{
   DUMP_RI(dst, imm);
   emit_3ub(p, 0x66, 0x0f, 0x71);
   emit_modrm_noreg(p, 4, dst);
   emit_1ub(p, imm);
}

/* PSRAD xmm, imm8 (66 0F 72 /4 ib). */
void sse2_psrad_imm( struct x86_function *p, struct x86_reg dst, unsigned imm )
{
   DUMP_RI(dst, imm);
   emit_3ub(p, 0x66, 0x0f, 0x72);
   emit_modrm_noreg(p, 4, dst);
   emit_1ub(p, imm);
}
1486
/* POR (66 0F EB): bitwise OR of 128-bit integer registers. */
void sse2_por( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR(dst, src);
   emit_3ub(p, 0x66, 0x0f, 0xeb);
   emit_modrm(p, dst, src);
}

/* RCPPS (0F 53): packed approximate reciprocal.
 * NOTE(review): RCPPS/RCPSS are SSE1 instructions; the sse2_ prefix here
 * appears to be historical naming only.
 */
void sse2_rcpps( struct x86_function *p,
                 struct x86_reg dst,
                 struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_2ub(p, X86_TWOB, 0x53);
   emit_modrm( p, dst, src );
}

/* RCPSS (F3 0F 53): scalar approximate reciprocal. */
void sse2_rcpss( struct x86_function *p,
                 struct x86_reg dst,
                 struct x86_reg src )
{
   DUMP_RR( dst, src );
   emit_3ub(p, 0xF3, X86_TWOB, 0x53);
   emit_modrm( p, dst, src );
}

/* PCMPGTD (66 0F 66): packed signed dword greater-than compare. */
void sse2_pcmpgtd(struct x86_function *p,
                  struct x86_reg dst,
                  struct x86_reg src)
{
   DUMP_RR(dst, src);
   emit_3ub(p, 0x66, X86_TWOB, 0x66);
   emit_modrm(p, dst, src);
}
1520
/***********************************************************************
 * x87 instructions
 */

/* Book-keeping: record that the emitted code pops the x87 register stack. */
static void note_x87_pop( struct x86_function *p )
{
   p->x87_stack--;
   assert(p->x87_stack >= 0);
}

/* Book-keeping: record that the emitted code pushes onto the x87 stack.
 * Depth is capped at 7 since the next push would need a free slot.
 */
static void note_x87_push( struct x86_function *p )
{
   p->x87_stack++;
   assert(p->x87_stack <= 7);
}

/* Debug aid: generated code is expected to leave the x87 stack empty. */
void x87_assert_stack_empty( struct x86_function *p )
{
   assert (p->x87_stack == 0);
}
1540
1541
/* FIST m32int (DB /2): store ST(0) as a 32-bit integer, no pop. */
void x87_fist( struct x86_function *p, struct x86_reg dst )
{
   DUMP_R( dst );
   emit_1ub(p, 0xdb);
   emit_modrm_noreg(p, 2, dst);
}

/* FISTP m32int (DB /3): store ST(0) as a 32-bit integer and pop. */
void x87_fistp( struct x86_function *p, struct x86_reg dst )
{
   DUMP_R( dst );
   emit_1ub(p, 0xdb);
   emit_modrm_noreg(p, 3, dst);
   note_x87_pop(p);
}

/* FILD: push an integer from memory onto the x87 stack.
 * NOTE(review): DF /0 is FILD m16int (16-bit load); the 32-bit form would
 * be DB /0 -- confirm callers really pass a 16-bit integer operand.
 */
void x87_fild( struct x86_function *p, struct x86_reg arg )
{
   DUMP_R( arg );
   emit_1ub(p, 0xdf);
   emit_modrm_noreg(p, 0, arg);
   note_x87_push(p);
}
1564
/* FLDZ (D9 EE): push +0.0. */
void x87_fldz( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xee);
   note_x87_push(p);
}


/* FLDCW m16 (D9 /5): load the x87 control word from memory. */
void x87_fldcw( struct x86_function *p, struct x86_reg arg )
{
   DUMP_R( arg );
   assert(arg.file == file_REG32);
   assert(arg.mod != mod_REG);
   emit_1ub(p, 0xd9);
   emit_modrm_noreg(p, 5, arg);
}

/* FLD1 (D9 E8): push +1.0. */
void x87_fld1( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xe8);
   note_x87_push(p);
}

/* FLDL2E (D9 EA): push log2(e). */
void x87_fldl2e( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xea);
   note_x87_push(p);
}

/* FLDLN2 (D9 ED): push ln(2). */
void x87_fldln2( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xed);
   note_x87_push(p);
}

/* WAIT/FWAIT (9B): wait for pending unmasked x87 exceptions. */
void x87_fwait( struct x86_function *p )
{
   DUMP();
   emit_1ub(p, 0x9b);
}

/* FNCLEX (DB E2): clear x87 exception flags without waiting. */
void x87_fnclex( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xdb, 0xe2);
}
1614
/* FCLEX: the waiting form of FNCLEX (WAIT prefix + FNCLEX). */
void x87_fclex( struct x86_function *p )
{
   x87_fwait(p);
   x87_fnclex(p);
}

/* The FCMOVcc family conditionally copies ST(i) to ST(0) based on EFLAGS.
 * Encodings: DA C0/C8/D0+i = FCMOVB/E/BE, DB C0/C8/D0+i = FCMOVNB/NE/NBE.
 */

/* FCMOVB ST(0),ST(i): move if below (CF=1). */
void x87_fcmovb( struct x86_function *p, struct x86_reg arg )
{
   DUMP_R( arg );
   assert(arg.file == file_x87);
   emit_2ub(p, 0xda, 0xc0+arg.idx);
}

/* FCMOVE ST(0),ST(i): move if equal (ZF=1). */
void x87_fcmove( struct x86_function *p, struct x86_reg arg )
{
   DUMP_R( arg );
   assert(arg.file == file_x87);
   emit_2ub(p, 0xda, 0xc8+arg.idx);
}

/* FCMOVBE ST(0),ST(i): move if below or equal. */
void x87_fcmovbe( struct x86_function *p, struct x86_reg arg )
{
   DUMP_R( arg );
   assert(arg.file == file_x87);
   emit_2ub(p, 0xda, 0xd0+arg.idx);
}

/* FCMOVNB ST(0),ST(i): move if not below (CF=0). */
void x87_fcmovnb( struct x86_function *p, struct x86_reg arg )
{
   DUMP_R( arg );
   assert(arg.file == file_x87);
   emit_2ub(p, 0xdb, 0xc0+arg.idx);
}

/* FCMOVNE ST(0),ST(i): move if not equal (ZF=0). */
void x87_fcmovne( struct x86_function *p, struct x86_reg arg )
{
   DUMP_R( arg );
   assert(arg.file == file_x87);
   emit_2ub(p, 0xdb, 0xc8+arg.idx);
}

/* FCMOVNBE ST(0),ST(i): move if not below or equal. */
void x87_fcmovnbe( struct x86_function *p, struct x86_reg arg )
{
   DUMP_R( arg );
   assert(arg.file == file_x87);
   emit_2ub(p, 0xdb, 0xd0+arg.idx);
}
1662
1663
1664
x87_arith_op(struct x86_function * p,struct x86_reg dst,struct x86_reg arg,unsigned char dst0ub0,unsigned char dst0ub1,unsigned char arg0ub0,unsigned char arg0ub1,unsigned char argmem_noreg)1665 static void x87_arith_op( struct x86_function *p, struct x86_reg dst, struct x86_reg arg,
1666 unsigned char dst0ub0,
1667 unsigned char dst0ub1,
1668 unsigned char arg0ub0,
1669 unsigned char arg0ub1,
1670 unsigned char argmem_noreg)
1671 {
1672 assert(dst.file == file_x87);
1673
1674 if (arg.file == file_x87) {
1675 if (dst.idx == 0)
1676 emit_2ub(p, dst0ub0, dst0ub1+arg.idx);
1677 else if (arg.idx == 0)
1678 emit_2ub(p, arg0ub0, arg0ub1+arg.idx);
1679 else
1680 assert(0);
1681 }
1682 else if (dst.idx == 0) {
1683 assert(arg.file == file_REG32);
1684 emit_1ub(p, 0xd8);
1685 emit_modrm_noreg(p, argmem_noreg, arg);
1686 }
1687 else
1688 assert(0);
1689 }
1690
/* FMUL: dst = dst * src.  D8 C8+i / DC C8+i / D8 /4 m32. */
void x87_fmul( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR( dst, src );
   x87_arith_op(p, dst, src,
		0xd8, 0xc8,
		0xdc, 0xc8,
		4);
}

/* FSUB: dst = dst - src.  D8 E0+i / DC E8+i / D8 /4... note the D8/DC
 * sub-opcodes are swapped relative to FSUBR because the DC group reverses
 * the operand roles.
 */
void x87_fsub( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR( dst, src );
   x87_arith_op(p, dst, src,
		0xd8, 0xe0,
		0xdc, 0xe8,
		4);
}

/* FSUBR: dst = src - dst.  D8 E8+i / DC E0+i / D8 /5 m32. */
void x87_fsubr( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR( dst, src );
   x87_arith_op(p, dst, src,
		0xd8, 0xe8,
		0xdc, 0xe0,
		5);
}

/* FADD: dst = dst + src.  D8 C0+i / DC C0+i / D8 /0 m32. */
void x87_fadd( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR( dst, src );
   x87_arith_op(p, dst, src,
		0xd8, 0xc0,
		0xdc, 0xc0,
		0);
}

/* FDIV: dst = dst / src.  D8 F0+i / DC F8+i / D8 /6 m32. */
void x87_fdiv( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR( dst, src );
   x87_arith_op(p, dst, src,
		0xd8, 0xf0,
		0xdc, 0xf8,
		6);
}

/* FDIVR: dst = src / dst.  D8 F8+i / DC F0+i / D8 /7 m32. */
void x87_fdivr( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
{
   DUMP_RR( dst, src );
   x87_arith_op(p, dst, src,
		0xd8, 0xf8,
		0xdc, 0xf0,
		7);
}
1744
/* FMULP ST(i),ST(0) (DE C8+i): multiply into ST(i), then pop. */
void x87_fmulp( struct x86_function *p, struct x86_reg dst )
{
   DUMP_R( dst );
   assert(dst.file == file_x87);
   assert(dst.idx >= 1);
   emit_2ub(p, 0xde, 0xc8+dst.idx);
   note_x87_pop(p);
}

/* FSUBP ST(i),ST(0) (DE E8+i): ST(i) = ST(i) - ST(0), then pop. */
void x87_fsubp( struct x86_function *p, struct x86_reg dst )
{
   DUMP_R( dst );
   assert(dst.file == file_x87);
   assert(dst.idx >= 1);
   emit_2ub(p, 0xde, 0xe8+dst.idx);
   note_x87_pop(p);
}

/* FSUBRP ST(i),ST(0) (DE E0+i): ST(i) = ST(0) - ST(i), then pop. */
void x87_fsubrp( struct x86_function *p, struct x86_reg dst )
{
   DUMP_R( dst );
   assert(dst.file == file_x87);
   assert(dst.idx >= 1);
   emit_2ub(p, 0xde, 0xe0+dst.idx);
   note_x87_pop(p);
}

/* FADDP ST(i),ST(0) (DE C0+i): add into ST(i), then pop. */
void x87_faddp( struct x86_function *p, struct x86_reg dst )
{
   DUMP_R( dst );
   assert(dst.file == file_x87);
   assert(dst.idx >= 1);
   emit_2ub(p, 0xde, 0xc0+dst.idx);
   note_x87_pop(p);
}

/* FDIVP ST(i),ST(0) (DE F8+i): ST(i) = ST(i) / ST(0), then pop. */
void x87_fdivp( struct x86_function *p, struct x86_reg dst )
{
   DUMP_R( dst );
   assert(dst.file == file_x87);
   assert(dst.idx >= 1);
   emit_2ub(p, 0xde, 0xf8+dst.idx);
   note_x87_pop(p);
}

/* FDIVRP ST(i),ST(0) (DE F0+i): ST(i) = ST(0) / ST(i), then pop. */
void x87_fdivrp( struct x86_function *p, struct x86_reg dst )
{
   DUMP_R( dst );
   assert(dst.file == file_x87);
   assert(dst.idx >= 1);
   emit_2ub(p, 0xde, 0xf0+dst.idx);
   note_x87_pop(p);
}
1798
/* FTST (D9 E4): compare ST(0) with 0.0, setting x87 condition codes. */
void x87_ftst( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xe4);
}

/* FUCOM ST(i) (DD E0+i): unordered compare ST(0) with ST(i). */
void x87_fucom( struct x86_function *p, struct x86_reg arg )
{
   DUMP_R( arg );
   assert(arg.file == file_x87);
   emit_2ub(p, 0xdd, 0xe0+arg.idx);
}

/* FUCOMP ST(i) (DD E8+i): unordered compare, then pop. */
void x87_fucomp( struct x86_function *p, struct x86_reg arg )
{
   DUMP_R( arg );
   assert(arg.file == file_x87);
   emit_2ub(p, 0xdd, 0xe8+arg.idx);
   note_x87_pop(p);
}

/* FUCOMPP (DA E9): unordered compare ST(0) with ST(1), then pop both. */
void x87_fucompp( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xda, 0xe9);
   note_x87_pop(p);             /* pop twice */
   note_x87_pop(p);             /* pop twice */
}

/* FXCH ST(i) (D9 C8+i): exchange ST(0) with ST(i). */
void x87_fxch( struct x86_function *p, struct x86_reg arg )
{
   DUMP_R( arg );
   assert(arg.file == file_x87);
   emit_2ub(p, 0xd9, 0xc8+arg.idx);
}
1834
/* FABS (D9 E1): ST(0) = |ST(0)|. */
void x87_fabs( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xe1);
}

/* FCHS (D9 E0): ST(0) = -ST(0). */
void x87_fchs( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xe0);
}

/* FCOS (D9 FF): ST(0) = cos(ST(0)). */
void x87_fcos( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xff);
}


/* FRNDINT (D9 FC): round ST(0) to integer using the current rounding mode. */
void x87_fprndint( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xfc);
}

/* FSCALE (D9 FD): ST(0) = ST(0) * 2^trunc(ST(1)). */
void x87_fscale( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xfd);
}

/* FSIN (D9 FE): ST(0) = sin(ST(0)). */
void x87_fsin( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xfe);
}

/* FSINCOS (D9 FB): computes sin and cos of ST(0).
 * NOTE(review): hardware pushes the cosine, growing the stack by one,
 * but this emitter does not call note_x87_push() -- confirm callers
 * compensate for the extra stack entry.
 */
void x87_fsincos( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xfb);
}

/* FSQRT (D9 FA): ST(0) = sqrt(ST(0)). */
void x87_fsqrt( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xfa);
}

/* FXTRACT (D9 F4): split ST(0) into exponent (ST(1)) and significand (ST(0)).
 * NOTE(review): hardware pushes one value; stack tracking is not updated
 * here either -- confirm callers account for it.
 */
void x87_fxtract( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xf4);
}
1889
/* st0 = (2^st0)-1
 *
 * Restrictions: -1.0 <= st0 <= 1.0
 */
void x87_f2xm1( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xf0);
}

/* st1 = st1 * log2(st0);
 * pop_stack;
 */
void x87_fyl2x( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xf1);
   note_x87_pop(p);
}

/* st1 = st1 * log2(st0 + 1.0);
 * pop_stack;
 *
 * A fast operation, with restrictions: -.29 < st0 < .29
 */
void x87_fyl2xp1( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xf9);
   note_x87_pop(p);
}
1922
/* FLD: push ST(i) (D9 C0+i) or a 32-bit float from memory (D9 /0). */
void x87_fld( struct x86_function *p, struct x86_reg arg )
{
   DUMP_R( arg );
   if (arg.file == file_x87)
      emit_2ub(p, 0xd9, 0xc0 + arg.idx);
   else {
      emit_1ub(p, 0xd9);
      emit_modrm_noreg(p, 0, arg);
   }
   note_x87_push(p);
}

/* FST: store ST(0) to ST(i) (DD D0+i) or a 32-bit float in memory (D9 /2),
 * no pop.
 */
void x87_fst( struct x86_function *p, struct x86_reg dst )
{
   DUMP_R( dst );
   if (dst.file == file_x87)
      emit_2ub(p, 0xdd, 0xd0 + dst.idx);
   else {
      emit_1ub(p, 0xd9);
      emit_modrm_noreg(p, 2, dst);
   }
}

/* FSTP: like FST (DD D8+i / D9 /3) but pops the stack afterwards. */
void x87_fstp( struct x86_function *p, struct x86_reg dst )
{
   DUMP_R( dst );
   if (dst.file == file_x87)
      emit_2ub(p, 0xdd, 0xd8 + dst.idx);
   else {
      emit_1ub(p, 0xd9);
      emit_modrm_noreg(p, 3, dst);
   }
   note_x87_pop(p);
}

/* Discard ST(0): FSTP ST(0). */
void x87_fpop( struct x86_function *p )
{
   x87_fstp( p, x86_make_reg( file_x87, 0 ));
}
1962
1963
/* FCOM: compare ST(0) with ST(i) (D8 D0+i) or m32real (D8 /2), no pop. */
void x87_fcom( struct x86_function *p, struct x86_reg dst )
{
   DUMP_R( dst );
   if (dst.file == file_x87)
      emit_2ub(p, 0xd8, 0xd0 + dst.idx);
   else {
      emit_1ub(p, 0xd8);
      emit_modrm_noreg(p, 2, dst);
   }
}


/* FCOMP: like FCOM (D8 D8+i / D8 /3) but pops the stack afterwards. */
void x87_fcomp( struct x86_function *p, struct x86_reg dst )
{
   DUMP_R( dst );
   if (dst.file == file_x87)
      emit_2ub(p, 0xd8, 0xd8 + dst.idx);
   else {
      emit_1ub(p, 0xd8);
      emit_modrm_noreg(p, 3, dst);
   }
   note_x87_pop(p);
}
1987
/* FCOMI ST(0),ST(i) (DB F0+i): compare and set EFLAGS directly, no pop. */
void x87_fcomi( struct x86_function *p, struct x86_reg arg )
{
   DUMP_R( arg );
   emit_2ub(p, 0xdb, 0xf0+arg.idx);
}
1993
/* FCOMIP ST(0),ST(i): compare, set EFLAGS, and pop the x87 stack.
 *
 * Encoding is DF F0+i (Intel SDM).  The previous code emitted DB F0+i,
 * which is FCOMI -- that form does NOT pop, so the hardware stack would
 * drift out of sync with the note_x87_pop() bookkeeping below.
 */
void x87_fcomip( struct x86_function *p, struct x86_reg arg )
{
   DUMP_R( arg );
   emit_2ub(p, 0xdf, 0xf0+arg.idx);
   note_x87_pop(p);
}
2000
2001
/* FNSTSW: store the x87 status word.  Uses the compact AX form (DF E0)
 * when the destination is the EAX register, otherwise the m16 form (DD /7).
 */
void x87_fnstsw( struct x86_function *p, struct x86_reg dst )
{
   DUMP_R( dst );
   assert(dst.file == file_REG32);

   if (dst.idx == reg_AX &&
       dst.mod == mod_REG)
      emit_2ub(p, 0xdf, 0xe0);
   else {
      emit_1ub(p, 0xdd);
      emit_modrm_noreg(p, 7, dst);
   }
}


/* FNSTCW m16 (D9 /7), preceded by WAIT -- together equivalent to FSTCW. */
void x87_fnstcw( struct x86_function *p, struct x86_reg dst )
{
   DUMP_R( dst );
   assert(dst.file == file_REG32);

   emit_1ub(p, 0x9b);           /* WAIT -- needed? */
   emit_1ub(p, 0xd9);
   emit_modrm_noreg(p, 7, dst);
}
2026
2027
2028
2029
/***********************************************************************
 * MMX instructions
 */

/* EMMS (0F 77): leave MMX state so x87 code can run again.  Only legal
 * when an MMX instruction has been emitted (need_emms set).
 */
void mmx_emms( struct x86_function *p )
{
   DUMP();
   assert(p->need_emms);
   emit_2ub(p, 0x0f, 0x77);
   p->need_emms = 0;
}

/* MMX PACKSSDW (0F 6B): pack dwords to words with signed saturation. */
void mmx_packssdw( struct x86_function *p,
                   struct x86_reg dst,
                   struct x86_reg src )
{
   DUMP_RR( dst, src );
   assert(dst.file == file_MMX &&
	  (src.file == file_MMX || src.mod != mod_REG));

   p->need_emms = 1;

   emit_2ub(p, X86_TWOB, 0x6b);
   emit_modrm( p, dst, src );
}

/* MMX PACKUSWB (0F 67): pack words to bytes with unsigned saturation. */
void mmx_packuswb( struct x86_function *p,
                   struct x86_reg dst,
                   struct x86_reg src )
{
   DUMP_RR( dst, src );
   assert(dst.file == file_MMX &&
	  (src.file == file_MMX || src.mod != mod_REG));

   p->need_emms = 1;

   emit_2ub(p, X86_TWOB, 0x67);
   emit_modrm( p, dst, src );
}

/* MMX MOVD (0F 6E/7E, no 66 prefix): 32-bit move to/from an MMX register. */
void mmx_movd( struct x86_function *p,
               struct x86_reg dst,
               struct x86_reg src )
{
   DUMP_RR( dst, src );
   p->need_emms = 1;
   emit_1ub(p, X86_TWOB);
   emit_op_modrm( p, 0x6e, 0x7e, dst, src );
}

/* MMX MOVQ (0F 6F/7F, no prefix): 64-bit move to/from an MMX register. */
void mmx_movq( struct x86_function *p,
               struct x86_reg dst,
               struct x86_reg src )
{
   DUMP_RR( dst, src );
   p->need_emms = 1;
   emit_1ub(p, X86_TWOB);
   emit_op_modrm( p, 0x6f, 0x7f, dst, src );
}
2089
2090
2091 /***********************************************************************
2092 * Helper functions
2093 */
2094
2095
x86_cdecl_caller_push_regs(struct x86_function * p)2096 void x86_cdecl_caller_push_regs( struct x86_function *p )
2097 {
2098 x86_push(p, x86_make_reg(file_REG32, reg_AX));
2099 x86_push(p, x86_make_reg(file_REG32, reg_CX));
2100 x86_push(p, x86_make_reg(file_REG32, reg_DX));
2101 }
2102
x86_cdecl_caller_pop_regs(struct x86_function * p)2103 void x86_cdecl_caller_pop_regs( struct x86_function *p )
2104 {
2105 x86_pop(p, x86_make_reg(file_REG32, reg_DX));
2106 x86_pop(p, x86_make_reg(file_REG32, reg_CX));
2107 x86_pop(p, x86_make_reg(file_REG32, reg_AX));
2108 }
2109
2110
/* Return the location of 1-based function argument \p arg under the
 * target calling convention: a register for the first few arguments on
 * x86-64, otherwise a stack slot relative to the (tracked) stack pointer.
 * Note file_REG32 is used for the GP-register file even on x86-64.
 */
struct x86_reg x86_fn_arg( struct x86_function *p,
                           unsigned arg )
{
   switch(x86_target(p))
   {
   case X86_64_WIN64_ABI:
      /* Microsoft uses a different calling convention than the rest of the world */
      /* Args 1-4 in RCX, RDX, R8, R9. */
      switch(arg)
      {
      case 1:
         return x86_make_reg(file_REG32, reg_CX);
      case 2:
         return x86_make_reg(file_REG32, reg_DX);
      case 3:
         return x86_make_reg(file_REG32, reg_R8);
      case 4:
         return x86_make_reg(file_REG32, reg_R9);
      default:
         /* Win64 allocates stack slots as if it pushed the first 4 arguments too */
         return x86_make_disp(x86_make_reg(file_REG32, reg_SP),
                              p->stack_offset + arg * 8);
      }
   case X86_64_STD_ABI:
      /* System V AMD64: args 1-6 in RDI, RSI, RDX, RCX, R8, R9. */
      switch(arg)
      {
      case 1:
         return x86_make_reg(file_REG32, reg_DI);
      case 2:
         return x86_make_reg(file_REG32, reg_SI);
      case 3:
         return x86_make_reg(file_REG32, reg_DX);
      case 4:
         return x86_make_reg(file_REG32, reg_CX);
      case 5:
         return x86_make_reg(file_REG32, reg_R8);
      case 6:
         return x86_make_reg(file_REG32, reg_R9);
      default:
         /* Argument 7 is the first stack argument, at [rsp + 8] past the
          * return address (plus any pushes tracked in stack_offset).
          */
         return x86_make_disp(x86_make_reg(file_REG32, reg_SP),
                              p->stack_offset + (arg - 6) * 8);     /* ??? */
      }
   case X86_32:
      /* All arguments on the stack, 4 bytes each, past the return address. */
      return x86_make_disp(x86_make_reg(file_REG32, reg_SP),
                           p->stack_offset + arg * 4);      /* ??? */
   default:
      assert(0 && "Unexpected x86 target ABI in x86_fn_arg");
      return x86_make_reg(file_REG32, reg_CX); /* not used / silence warning */
   }
}
2160
/* Shared function-initialization: snapshot the CPU's SIMD capabilities,
 * point the code cursor at the start of the store, and emit the CET
 * landing-pad instruction expected at indirect-call targets.
 */
static void x86_init_func_common( struct x86_function *p )
{
   p->caps = 0;
   if(util_get_cpu_caps()->has_mmx)
      p->caps |= X86_MMX;
   if(util_get_cpu_caps()->has_mmx2)
      p->caps |= X86_MMX2;
   if(util_get_cpu_caps()->has_sse)
      p->caps |= X86_SSE;
   if(util_get_cpu_caps()->has_sse2)
      p->caps |= X86_SSE2;
   if(util_get_cpu_caps()->has_sse3)
      p->caps |= X86_SSE3;
   if(util_get_cpu_caps()->has_sse4_1)
      p->caps |= X86_SSE4_1;
   p->csr = p->store;
#if DETECT_ARCH_X86
   /* ENDBR32: bytes F3 0F 1E FB emitted as one little-endian dword. */
   emit_1i(p, 0xfb1e0ff3);
#else
   /* ENDBR64: bytes F3 0F 1E FA emitted as one little-endian dword. */
   emit_1i(p, 0xfa1e0ff3);
#endif
   DUMP_START();
}
2184
x86_init_func(struct x86_function * p)2185 void x86_init_func( struct x86_function *p )
2186 {
2187 p->size = 0;
2188 p->store = NULL;
2189 x86_init_func_common(p);
2190 }
2191
/* Begin a function with an executable code buffer of \p code_size bytes.
 * On allocation failure the small error_overflow scratch area is used so
 * emission can proceed harmlessly; x86_get_func() later reports NULL.
 */
void x86_init_func_size( struct x86_function *p, unsigned code_size )
{
   p->size = code_size;
   p->store = rtasm_exec_malloc(code_size);
   if (p->store == NULL) {
      p->store = p->error_overflow;
   }
   x86_init_func_common(p);
}
2201
x86_release_func(struct x86_function * p)2202 void x86_release_func( struct x86_function *p )
2203 {
2204 if (p->store && p->store != p->error_overflow)
2205 rtasm_exec_free(p->store);
2206
2207 p->store = NULL;
2208 p->csr = NULL;
2209 p->size = 0;
2210 }
2211
2212
2213 static inline x86_func
voidptr_to_x86_func(void * v)2214 voidptr_to_x86_func(void *v)
2215 {
2216 union {
2217 void *v;
2218 x86_func f;
2219 } u;
2220 STATIC_ASSERT(sizeof(u.v) == sizeof(u.f));
2221 u.v = v;
2222 return u.f;
2223 }
2224
2225
/* Finish emission and return the generated code as a callable function
 * pointer, or NULL if allocation failed (store fell back to the
 * error_overflow scratch area).
 */
x86_func x86_get_func( struct x86_function *p )
{
   DUMP_END();
   if (DISASSEM && p->store)
      debug_printf("disassemble %p %p\n", p->store, p->csr);

   if (p->store == p->error_overflow)
      return voidptr_to_x86_func(NULL);
   else
      return voidptr_to_x86_func(p->store);
}
2237
2238 #else
2239
/* Stub so this translation unit is not empty on non-x86 builds. */
void x86sse_dummy( void );

void x86sse_dummy( void )
{
}
2245
2246 #endif
2247