2
* Tiny Code Generator for QEMU
4
* Copyright (c) 2008 Fabrice Bellard
6
* Permission is hereby granted, free of charge, to any person obtaining a copy
7
* of this software and associated documentation files (the "Software"), to deal
8
* in the Software without restriction, including without limitation the rights
9
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10
* copies of the Software, and to permit persons to whom the Software is
11
* furnished to do so, subject to the following conditions:
13
* The above copyright notice and this permission notice shall be included in
14
* all copies or substantial portions of the Software.
16
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
24
const char *tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
35
int tcg_target_reg_alloc_order[TCG_TARGET_NB_REGS] = {
46
/* Registers used for integer function-call arguments, in argument order.
   The sequence EAX, EDX, ECX matches the GCC i386 regparm convention
   (consistent with the TCG_CALL_TYPE_REGPARM handling in this file). */
const int tcg_target_call_iarg_regs[3] = { TCG_REG_EAX, TCG_REG_EDX, TCG_REG_ECX };
47
/* Registers holding function-call return values: EAX for a 32-bit result,
   with EDX as the second (high-half) register for 64-bit results — the
   qemu_ld 64-bit path in this file copies EAX/EDX into data_reg/data_reg2
   accordingly. */
const int tcg_target_call_oarg_regs[2] = { TCG_REG_EAX, TCG_REG_EDX };
49
static void patch_reloc(uint8_t *code_ptr, int type,
50
tcg_target_long value)
54
*(uint32_t *)code_ptr = value;
57
*(uint32_t *)code_ptr = value - (long)code_ptr;
64
/* maximum number of register used for input function arguments */
65
static inline int tcg_target_get_call_iarg_regs_count(int flags)
67
flags &= TCG_CALL_TYPE_MASK;
69
case TCG_CALL_TYPE_STD:
71
case TCG_CALL_TYPE_REGPARM_1:
72
case TCG_CALL_TYPE_REGPARM_2:
73
case TCG_CALL_TYPE_REGPARM:
74
return flags - TCG_CALL_TYPE_REGPARM_1 + 1;
80
/* parse target specific constraints */
81
int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
89
tcg_regset_set_reg(ct->u.regs, TCG_REG_EAX);
93
tcg_regset_set_reg(ct->u.regs, TCG_REG_EBX);
97
tcg_regset_set_reg(ct->u.regs, TCG_REG_ECX);
100
ct->ct |= TCG_CT_REG;
101
tcg_regset_set_reg(ct->u.regs, TCG_REG_EDX);
104
ct->ct |= TCG_CT_REG;
105
tcg_regset_set_reg(ct->u.regs, TCG_REG_ESI);
108
ct->ct |= TCG_CT_REG;
109
tcg_regset_set_reg(ct->u.regs, TCG_REG_EDI);
112
ct->ct |= TCG_CT_REG;
113
tcg_regset_set32(ct->u.regs, 0, 0xf);
116
ct->ct |= TCG_CT_REG;
117
tcg_regset_set32(ct->u.regs, 0, 0xff);
120
/* qemu_ld/st address constraint */
122
ct->ct |= TCG_CT_REG;
123
tcg_regset_set32(ct->u.regs, 0, 0xff);
124
tcg_regset_reset_reg(ct->u.regs, TCG_REG_EAX);
125
tcg_regset_reset_reg(ct->u.regs, TCG_REG_EDX);
135
/* test if a constant matches the constraint */
136
static inline int tcg_target_const_match(tcg_target_long val,
137
const TCGArgConstraint *arg_ct)
141
if (ct & TCG_CT_CONST)
178
#define P_EXT 0x100 /* 0x0f opcode prefix */
180
static const uint8_t tcg_cond_to_jcc[10] = {
181
[TCG_COND_EQ] = JCC_JE,
182
[TCG_COND_NE] = JCC_JNE,
183
[TCG_COND_LT] = JCC_JL,
184
[TCG_COND_GE] = JCC_JGE,
185
[TCG_COND_LE] = JCC_JLE,
186
[TCG_COND_GT] = JCC_JG,
187
[TCG_COND_LTU] = JCC_JB,
188
[TCG_COND_GEU] = JCC_JAE,
189
[TCG_COND_LEU] = JCC_JBE,
190
[TCG_COND_GTU] = JCC_JA,
193
static inline void tcg_out_opc(TCGContext *s, int opc)
200
static inline void tcg_out_modrm(TCGContext *s, int opc, int r, int rm)
203
tcg_out8(s, 0xc0 | (r << 3) | rm);
206
/* rm == -1 means no register index */
207
static inline void tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm,
212
tcg_out8(s, 0x05 | (r << 3));
213
tcg_out32(s, offset);
214
} else if (offset == 0 && rm != TCG_REG_EBP) {
215
if (rm == TCG_REG_ESP) {
216
tcg_out8(s, 0x04 | (r << 3));
219
tcg_out8(s, 0x00 | (r << 3) | rm);
221
} else if ((int8_t)offset == offset) {
222
if (rm == TCG_REG_ESP) {
223
tcg_out8(s, 0x44 | (r << 3));
226
tcg_out8(s, 0x40 | (r << 3) | rm);
230
if (rm == TCG_REG_ESP) {
231
tcg_out8(s, 0x84 | (r << 3));
234
tcg_out8(s, 0x80 | (r << 3) | rm);
236
tcg_out32(s, offset);
240
static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
243
tcg_out_modrm(s, 0x8b, ret, arg);
246
static inline void tcg_out_movi(TCGContext *s, TCGType type,
247
int ret, int32_t arg)
251
tcg_out_modrm(s, 0x01 | (ARITH_XOR << 3), ret, ret);
253
tcg_out8(s, 0xb8 + ret);
258
static inline void tcg_out_ld(TCGContext *s, int ret,
259
int arg1, int32_t arg2)
262
tcg_out_modrm_offset(s, 0x8b, ret, arg1, arg2);
265
static inline void tcg_out_st(TCGContext *s, int arg,
266
int arg1, int32_t arg2)
269
tcg_out_modrm_offset(s, 0x89, arg, arg1, arg2);
272
static inline void tgen_arithi(TCGContext *s, int c, int r0, int32_t val)
274
if (val == (int8_t)val) {
275
tcg_out_modrm(s, 0x83, c, r0);
278
tcg_out_modrm(s, 0x81, c, r0);
283
void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
286
tgen_arithi(s, ARITH_ADD, reg, val);
289
static void tcg_out_jxx(TCGContext *s, int opc, int label_index)
292
TCGLabel *l = &s->labels[label_index];
295
val = l->u.value - (tcg_target_long)s->code_ptr;
297
if ((int8_t)val1 == val1) {
301
tcg_out8(s, 0x70 + opc);
306
tcg_out32(s, val - 5);
309
tcg_out8(s, 0x80 + opc);
310
tcg_out32(s, val - 6);
318
tcg_out8(s, 0x80 + opc);
320
tcg_out_reloc(s, s->code_ptr, R_386_PC32, label_index, -4);
325
static void tcg_out_brcond(TCGContext *s, int cond,
326
TCGArg arg1, TCGArg arg2, int const_arg2,
350
tcg_out_modrm(s, 0x85, arg1, arg1);
351
tcg_out_jxx(s, c, label_index);
354
tgen_arithi(s, ARITH_CMP, arg1, arg2);
355
tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index);
358
tcg_out_modrm(s, 0x01 | (ARITH_CMP << 3), arg2, arg1);
359
tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index);
363
/* XXX: we implement it at the target level to avoid having to
364
handle cross basic blocks temporaries */
365
static void tcg_out_brcond2(TCGContext *s,
366
const TCGArg *args, const int *const_args)
369
label_next = gen_new_label();
372
tcg_out_brcond(s, TCG_COND_NE, args[0], args[2], const_args[2], label_next);
373
tcg_out_brcond(s, TCG_COND_EQ, args[1], args[3], const_args[3], args[5]);
376
tcg_out_brcond(s, TCG_COND_NE, args[0], args[2], const_args[2], args[5]);
377
tcg_out_brcond(s, TCG_COND_NE, args[1], args[3], const_args[3], args[5]);
380
tcg_out_brcond(s, TCG_COND_LT, args[1], args[3], const_args[3], args[5]);
381
tcg_out_brcond(s, TCG_COND_NE, args[1], args[3], const_args[3], label_next);
382
tcg_out_brcond(s, TCG_COND_LT, args[0], args[2], const_args[2], args[5]);
385
tcg_out_brcond(s, TCG_COND_LT, args[1], args[3], const_args[3], args[5]);
386
tcg_out_brcond(s, TCG_COND_NE, args[1], args[3], const_args[3], label_next);
387
tcg_out_brcond(s, TCG_COND_LE, args[0], args[2], const_args[2], args[5]);
390
tcg_out_brcond(s, TCG_COND_GT, args[1], args[3], const_args[3], args[5]);
391
tcg_out_brcond(s, TCG_COND_NE, args[1], args[3], const_args[3], label_next);
392
tcg_out_brcond(s, TCG_COND_GT, args[0], args[2], const_args[2], args[5]);
395
tcg_out_brcond(s, TCG_COND_GT, args[1], args[3], const_args[3], args[5]);
396
tcg_out_brcond(s, TCG_COND_NE, args[1], args[3], const_args[3], label_next);
397
tcg_out_brcond(s, TCG_COND_GE, args[0], args[2], const_args[2], args[5]);
400
tcg_out_brcond(s, TCG_COND_LTU, args[1], args[3], const_args[3], args[5]);
401
tcg_out_brcond(s, TCG_COND_NE, args[1], args[3], const_args[3], label_next);
402
tcg_out_brcond(s, TCG_COND_LTU, args[0], args[2], const_args[2], args[5]);
405
tcg_out_brcond(s, TCG_COND_LTU, args[1], args[3], const_args[3], args[5]);
406
tcg_out_brcond(s, TCG_COND_NE, args[1], args[3], const_args[3], label_next);
407
tcg_out_brcond(s, TCG_COND_LEU, args[0], args[2], const_args[2], args[5]);
410
tcg_out_brcond(s, TCG_COND_GTU, args[1], args[3], const_args[3], args[5]);
411
tcg_out_brcond(s, TCG_COND_NE, args[1], args[3], const_args[3], label_next);
412
tcg_out_brcond(s, TCG_COND_GTU, args[0], args[2], const_args[2], args[5]);
415
tcg_out_brcond(s, TCG_COND_GTU, args[1], args[3], const_args[3], args[5]);
416
tcg_out_brcond(s, TCG_COND_NE, args[1], args[3], const_args[3], label_next);
417
tcg_out_brcond(s, TCG_COND_GEU, args[0], args[2], const_args[2], args[5]);
422
tcg_out_label(s, label_next, (tcg_target_long)s->code_ptr);
425
#if defined(CONFIG_SOFTMMU)
426
extern void __ldb_mmu(void);
427
extern void __ldw_mmu(void);
428
extern void __ldl_mmu(void);
429
extern void __ldq_mmu(void);
431
extern void __stb_mmu(void);
432
extern void __stw_mmu(void);
433
extern void __stl_mmu(void);
434
extern void __stq_mmu(void);
436
static void *qemu_ld_helpers[4] = {
443
static void *qemu_st_helpers[4] = {
451
/* XXX: qemu_ld and qemu_st could be modified to clobber only EDX and
452
EAX. It will be useful once fixed registers globals are less
454
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
457
int addr_reg, data_reg, data_reg2, r0, r1, mem_index, s_bits, bswap;
458
#if defined(CONFIG_SOFTMMU)
459
uint8_t *label1_ptr, *label2_ptr;
461
#if TARGET_LONG_BITS == 64
462
#if defined(CONFIG_SOFTMMU)
474
#if TARGET_LONG_BITS == 64
483
#if defined(CONFIG_SOFTMMU)
484
tcg_out_mov(s, r1, addr_reg);
486
tcg_out_mov(s, r0, addr_reg);
488
tcg_out_modrm(s, 0xc1, 5, r1); /* shr $x, r1 */
489
tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
491
tcg_out_modrm(s, 0x81, 4, r0); /* andl $x, r0 */
492
tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
494
tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
495
tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
497
tcg_out_opc(s, 0x8d); /* lea offset(r1, %ebp), r1 */
498
tcg_out8(s, 0x80 | (r1 << 3) | 0x04);
499
tcg_out8(s, (5 << 3) | r1);
500
tcg_out32(s, offsetof(CPUState, tlb_table[mem_index][0].addr_read));
503
tcg_out_modrm_offset(s, 0x3b, r0, r1, 0);
505
tcg_out_mov(s, r0, addr_reg);
507
#if TARGET_LONG_BITS == 32
509
tcg_out8(s, 0x70 + JCC_JE);
510
label1_ptr = s->code_ptr;
514
tcg_out8(s, 0x70 + JCC_JNE);
515
label3_ptr = s->code_ptr;
518
/* cmp 4(r1), addr_reg2 */
519
tcg_out_modrm_offset(s, 0x3b, addr_reg2, r1, 4);
522
tcg_out8(s, 0x70 + JCC_JE);
523
label1_ptr = s->code_ptr;
527
*label3_ptr = s->code_ptr - label3_ptr - 1;
530
/* XXX: move that code at the end of the TB */
531
#if TARGET_LONG_BITS == 32
532
tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_EDX, mem_index);
534
tcg_out_mov(s, TCG_REG_EDX, addr_reg2);
535
tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_ECX, mem_index);
538
tcg_out32(s, (tcg_target_long)qemu_ld_helpers[s_bits] -
539
(tcg_target_long)s->code_ptr - 4);
544
tcg_out_modrm(s, 0xbe | P_EXT, data_reg, TCG_REG_EAX);
548
tcg_out_modrm(s, 0xbf | P_EXT, data_reg, TCG_REG_EAX);
554
tcg_out_mov(s, data_reg, TCG_REG_EAX);
557
if (data_reg == TCG_REG_EDX) {
558
tcg_out_opc(s, 0x90 + TCG_REG_EDX); /* xchg %edx, %eax */
559
tcg_out_mov(s, data_reg2, TCG_REG_EAX);
561
tcg_out_mov(s, data_reg, TCG_REG_EAX);
562
tcg_out_mov(s, data_reg2, TCG_REG_EDX);
569
label2_ptr = s->code_ptr;
573
*label1_ptr = s->code_ptr - label1_ptr - 1;
576
tcg_out_modrm_offset(s, 0x03, r0, r1, offsetof(CPUTLBEntry, addend) -
577
offsetof(CPUTLBEntry, addr_read));
582
#ifdef TARGET_WORDS_BIGENDIAN
590
tcg_out_modrm_offset(s, 0xb6 | P_EXT, data_reg, r0, 0);
594
tcg_out_modrm_offset(s, 0xbe | P_EXT, data_reg, r0, 0);
598
tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, 0);
600
/* rolw $8, data_reg */
602
tcg_out_modrm(s, 0xc1, 0, data_reg);
608
tcg_out_modrm_offset(s, 0xbf | P_EXT, data_reg, r0, 0);
610
/* rolw $8, data_reg */
612
tcg_out_modrm(s, 0xc1, 0, data_reg);
615
/* movswl data_reg, data_reg */
616
tcg_out_modrm(s, 0xbf | P_EXT, data_reg, data_reg);
620
/* movl (r0), data_reg */
621
tcg_out_modrm_offset(s, 0x8b, data_reg, r0, 0);
624
tcg_out_opc(s, (0xc8 + data_reg) | P_EXT);
628
/* XXX: could be nicer */
629
if (r0 == data_reg) {
633
tcg_out_mov(s, r1, r0);
637
tcg_out_modrm_offset(s, 0x8b, data_reg, r0, 0);
638
tcg_out_modrm_offset(s, 0x8b, data_reg2, r0, 4);
640
tcg_out_modrm_offset(s, 0x8b, data_reg, r0, 4);
641
tcg_out_opc(s, (0xc8 + data_reg) | P_EXT);
643
tcg_out_modrm_offset(s, 0x8b, data_reg2, r0, 0);
645
tcg_out_opc(s, (0xc8 + data_reg2) | P_EXT);
652
#if defined(CONFIG_SOFTMMU)
654
*label2_ptr = s->code_ptr - label2_ptr - 1;
659
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
662
int addr_reg, data_reg, data_reg2, r0, r1, mem_index, s_bits, bswap;
663
#if defined(CONFIG_SOFTMMU)
664
uint8_t *label1_ptr, *label2_ptr;
666
#if TARGET_LONG_BITS == 64
667
#if defined(CONFIG_SOFTMMU)
679
#if TARGET_LONG_BITS == 64
689
#if defined(CONFIG_SOFTMMU)
690
tcg_out_mov(s, r1, addr_reg);
692
tcg_out_mov(s, r0, addr_reg);
694
tcg_out_modrm(s, 0xc1, 5, r1); /* shr $x, r1 */
695
tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
697
tcg_out_modrm(s, 0x81, 4, r0); /* andl $x, r0 */
698
tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
700
tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
701
tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
703
tcg_out_opc(s, 0x8d); /* lea offset(r1, %ebp), r1 */
704
tcg_out8(s, 0x80 | (r1 << 3) | 0x04);
705
tcg_out8(s, (5 << 3) | r1);
706
tcg_out32(s, offsetof(CPUState, tlb_table[mem_index][0].addr_write));
709
tcg_out_modrm_offset(s, 0x3b, r0, r1, 0);
711
tcg_out_mov(s, r0, addr_reg);
713
#if TARGET_LONG_BITS == 32
715
tcg_out8(s, 0x70 + JCC_JE);
716
label1_ptr = s->code_ptr;
720
tcg_out8(s, 0x70 + JCC_JNE);
721
label3_ptr = s->code_ptr;
724
/* cmp 4(r1), addr_reg2 */
725
tcg_out_modrm_offset(s, 0x3b, addr_reg2, r1, 4);
728
tcg_out8(s, 0x70 + JCC_JE);
729
label1_ptr = s->code_ptr;
733
*label3_ptr = s->code_ptr - label3_ptr - 1;
736
/* XXX: move that code at the end of the TB */
737
#if TARGET_LONG_BITS == 32
739
tcg_out_mov(s, TCG_REG_EDX, data_reg);
740
tcg_out_mov(s, TCG_REG_ECX, data_reg2);
741
tcg_out8(s, 0x6a); /* push Ib */
742
tcg_out8(s, mem_index);
744
tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
745
(tcg_target_long)s->code_ptr - 4);
746
tcg_out_addi(s, TCG_REG_ESP, 4);
751
tcg_out_modrm(s, 0xb6 | P_EXT, TCG_REG_EDX, data_reg);
755
tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_EDX, data_reg);
758
tcg_out_mov(s, TCG_REG_EDX, data_reg);
761
tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_ECX, mem_index);
763
tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
764
(tcg_target_long)s->code_ptr - 4);
768
tcg_out_mov(s, TCG_REG_EDX, addr_reg2);
769
tcg_out8(s, 0x6a); /* push Ib */
770
tcg_out8(s, mem_index);
771
tcg_out_opc(s, 0x50 + data_reg2); /* push */
772
tcg_out_opc(s, 0x50 + data_reg); /* push */
774
tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
775
(tcg_target_long)s->code_ptr - 4);
776
tcg_out_addi(s, TCG_REG_ESP, 12);
778
tcg_out_mov(s, TCG_REG_EDX, addr_reg2);
782
tcg_out_modrm(s, 0xb6 | P_EXT, TCG_REG_ECX, data_reg);
786
tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_ECX, data_reg);
789
tcg_out_mov(s, TCG_REG_ECX, data_reg);
792
tcg_out8(s, 0x6a); /* push Ib */
793
tcg_out8(s, mem_index);
795
tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
796
(tcg_target_long)s->code_ptr - 4);
797
tcg_out_addi(s, TCG_REG_ESP, 4);
803
label2_ptr = s->code_ptr;
807
*label1_ptr = s->code_ptr - label1_ptr - 1;
810
tcg_out_modrm_offset(s, 0x03, r0, r1, offsetof(CPUTLBEntry, addend) -
811
offsetof(CPUTLBEntry, addr_write));
816
#ifdef TARGET_WORDS_BIGENDIAN
824
tcg_out_modrm_offset(s, 0x88, data_reg, r0, 0);
828
tcg_out_mov(s, r1, data_reg);
829
tcg_out8(s, 0x66); /* rolw $8, %ecx */
830
tcg_out_modrm(s, 0xc1, 0, r1);
836
tcg_out_modrm_offset(s, 0x89, data_reg, r0, 0);
840
tcg_out_mov(s, r1, data_reg);
842
tcg_out_opc(s, (0xc8 + r1) | P_EXT);
846
tcg_out_modrm_offset(s, 0x89, data_reg, r0, 0);
850
tcg_out_mov(s, r1, data_reg2);
852
tcg_out_opc(s, (0xc8 + r1) | P_EXT);
853
tcg_out_modrm_offset(s, 0x89, r1, r0, 0);
854
tcg_out_mov(s, r1, data_reg);
856
tcg_out_opc(s, (0xc8 + r1) | P_EXT);
857
tcg_out_modrm_offset(s, 0x89, r1, r0, 4);
859
tcg_out_modrm_offset(s, 0x89, data_reg, r0, 0);
860
tcg_out_modrm_offset(s, 0x89, data_reg2, r0, 4);
867
#if defined(CONFIG_SOFTMMU)
869
*label2_ptr = s->code_ptr - label2_ptr - 1;
873
static inline void tcg_out_op(TCGContext *s, int opc,
874
const TCGArg *args, const int *const_args)
879
case INDEX_op_exit_tb:
880
tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_EAX, args[0]);
881
tcg_out8(s, 0xc3); /* ret */
883
case INDEX_op_goto_tb:
884
if (s->tb_jmp_offset) {
885
/* direct jump method */
886
tcg_out8(s, 0xe9); /* jmp im */
887
s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
890
/* indirect jump method */
892
tcg_out_modrm_offset(s, 0xff, 4, -1,
893
(tcg_target_long)(s->tb_next + args[0]));
895
s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
900
tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
902
tcg_out_modrm(s, 0xff, 2, args[0]);
908
tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
910
tcg_out_modrm(s, 0xff, 4, args[0]);
914
tcg_out_jxx(s, JCC_JMP, args[0]);
916
case INDEX_op_movi_i32:
917
tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
919
case INDEX_op_ld8u_i32:
921
tcg_out_modrm_offset(s, 0xb6 | P_EXT, args[0], args[1], args[2]);
923
case INDEX_op_ld8s_i32:
925
tcg_out_modrm_offset(s, 0xbe | P_EXT, args[0], args[1], args[2]);
927
case INDEX_op_ld16u_i32:
929
tcg_out_modrm_offset(s, 0xb7 | P_EXT, args[0], args[1], args[2]);
931
case INDEX_op_ld16s_i32:
933
tcg_out_modrm_offset(s, 0xbf | P_EXT, args[0], args[1], args[2]);
935
case INDEX_op_ld_i32:
937
tcg_out_modrm_offset(s, 0x8b, args[0], args[1], args[2]);
939
case INDEX_op_st8_i32:
941
tcg_out_modrm_offset(s, 0x88, args[0], args[1], args[2]);
943
case INDEX_op_st16_i32:
946
tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
948
case INDEX_op_st_i32:
950
tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
952
case INDEX_op_sub_i32:
955
case INDEX_op_and_i32:
958
case INDEX_op_or_i32:
961
case INDEX_op_xor_i32:
964
case INDEX_op_add_i32:
968
tgen_arithi(s, c, args[0], args[2]);
970
tcg_out_modrm(s, 0x01 | (c << 3), args[2], args[0]);
973
case INDEX_op_mul_i32:
977
if (val == (int8_t)val) {
978
tcg_out_modrm(s, 0x6b, args[0], args[0]);
981
tcg_out_modrm(s, 0x69, args[0], args[0]);
985
tcg_out_modrm(s, 0xaf | P_EXT, args[0], args[2]);
988
case INDEX_op_mulu2_i32:
989
tcg_out_modrm(s, 0xf7, 4, args[3]);
991
case INDEX_op_div2_i32:
992
tcg_out_modrm(s, 0xf7, 7, args[4]);
994
case INDEX_op_divu2_i32:
995
tcg_out_modrm(s, 0xf7, 6, args[4]);
997
case INDEX_op_shl_i32:
1000
if (const_args[2]) {
1002
tcg_out_modrm(s, 0xd1, c, args[0]);
1004
tcg_out_modrm(s, 0xc1, c, args[0]);
1005
tcg_out8(s, args[2]);
1008
tcg_out_modrm(s, 0xd3, c, args[0]);
1011
case INDEX_op_shr_i32:
1014
case INDEX_op_sar_i32:
1018
case INDEX_op_add2_i32:
1020
tgen_arithi(s, ARITH_ADD, args[0], args[4]);
1022
tcg_out_modrm(s, 0x01 | (ARITH_ADD << 3), args[4], args[0]);
1024
tgen_arithi(s, ARITH_ADC, args[1], args[5]);
1026
tcg_out_modrm(s, 0x01 | (ARITH_ADC << 3), args[5], args[1]);
1028
case INDEX_op_sub2_i32:
1030
tgen_arithi(s, ARITH_SUB, args[0], args[4]);
1032
tcg_out_modrm(s, 0x01 | (ARITH_SUB << 3), args[4], args[0]);
1034
tgen_arithi(s, ARITH_SBB, args[1], args[5]);
1036
tcg_out_modrm(s, 0x01 | (ARITH_SBB << 3), args[5], args[1]);
1038
case INDEX_op_brcond_i32:
1039
tcg_out_brcond(s, args[2], args[0], args[1], const_args[1], args[3]);
1041
case INDEX_op_brcond2_i32:
1042
tcg_out_brcond2(s, args, const_args);
1045
case INDEX_op_qemu_ld8u:
1046
tcg_out_qemu_ld(s, args, 0);
1048
case INDEX_op_qemu_ld8s:
1049
tcg_out_qemu_ld(s, args, 0 | 4);
1051
case INDEX_op_qemu_ld16u:
1052
tcg_out_qemu_ld(s, args, 1);
1054
case INDEX_op_qemu_ld16s:
1055
tcg_out_qemu_ld(s, args, 1 | 4);
1057
case INDEX_op_qemu_ld32u:
1058
tcg_out_qemu_ld(s, args, 2);
1060
case INDEX_op_qemu_ld64:
1061
tcg_out_qemu_ld(s, args, 3);
1064
case INDEX_op_qemu_st8:
1065
tcg_out_qemu_st(s, args, 0);
1067
case INDEX_op_qemu_st16:
1068
tcg_out_qemu_st(s, args, 1);
1070
case INDEX_op_qemu_st32:
1071
tcg_out_qemu_st(s, args, 2);
1073
case INDEX_op_qemu_st64:
1074
tcg_out_qemu_st(s, args, 3);
1082
static const TCGTargetOpDef x86_op_defs[] = {
1083
{ INDEX_op_exit_tb, { } },
1084
{ INDEX_op_goto_tb, { } },
1085
{ INDEX_op_call, { "ri" } },
1086
{ INDEX_op_jmp, { "ri" } },
1087
{ INDEX_op_br, { } },
1088
{ INDEX_op_mov_i32, { "r", "r" } },
1089
{ INDEX_op_movi_i32, { "r" } },
1090
{ INDEX_op_ld8u_i32, { "r", "r" } },
1091
{ INDEX_op_ld8s_i32, { "r", "r" } },
1092
{ INDEX_op_ld16u_i32, { "r", "r" } },
1093
{ INDEX_op_ld16s_i32, { "r", "r" } },
1094
{ INDEX_op_ld_i32, { "r", "r" } },
1095
{ INDEX_op_st8_i32, { "q", "r" } },
1096
{ INDEX_op_st16_i32, { "r", "r" } },
1097
{ INDEX_op_st_i32, { "r", "r" } },
1099
{ INDEX_op_add_i32, { "r", "0", "ri" } },
1100
{ INDEX_op_sub_i32, { "r", "0", "ri" } },
1101
{ INDEX_op_mul_i32, { "r", "0", "ri" } },
1102
{ INDEX_op_mulu2_i32, { "a", "d", "a", "r" } },
1103
{ INDEX_op_div2_i32, { "a", "d", "0", "1", "r" } },
1104
{ INDEX_op_divu2_i32, { "a", "d", "0", "1", "r" } },
1105
{ INDEX_op_and_i32, { "r", "0", "ri" } },
1106
{ INDEX_op_or_i32, { "r", "0", "ri" } },
1107
{ INDEX_op_xor_i32, { "r", "0", "ri" } },
1109
{ INDEX_op_shl_i32, { "r", "0", "ci" } },
1110
{ INDEX_op_shr_i32, { "r", "0", "ci" } },
1111
{ INDEX_op_sar_i32, { "r", "0", "ci" } },
1113
{ INDEX_op_brcond_i32, { "r", "ri" } },
1115
{ INDEX_op_add2_i32, { "r", "r", "0", "1", "ri", "ri" } },
1116
{ INDEX_op_sub2_i32, { "r", "r", "0", "1", "ri", "ri" } },
1117
{ INDEX_op_brcond2_i32, { "r", "r", "ri", "ri" } },
1119
#if TARGET_LONG_BITS == 32
1120
{ INDEX_op_qemu_ld8u, { "r", "L" } },
1121
{ INDEX_op_qemu_ld8s, { "r", "L" } },
1122
{ INDEX_op_qemu_ld16u, { "r", "L" } },
1123
{ INDEX_op_qemu_ld16s, { "r", "L" } },
1124
{ INDEX_op_qemu_ld32u, { "r", "L" } },
1125
{ INDEX_op_qemu_ld64, { "r", "r", "L" } },
1127
{ INDEX_op_qemu_st8, { "cb", "L" } },
1128
{ INDEX_op_qemu_st16, { "L", "L" } },
1129
{ INDEX_op_qemu_st32, { "L", "L" } },
1130
{ INDEX_op_qemu_st64, { "L", "L", "L" } },
1132
{ INDEX_op_qemu_ld8u, { "r", "L", "L" } },
1133
{ INDEX_op_qemu_ld8s, { "r", "L", "L" } },
1134
{ INDEX_op_qemu_ld16u, { "r", "L", "L" } },
1135
{ INDEX_op_qemu_ld16s, { "r", "L", "L" } },
1136
{ INDEX_op_qemu_ld32u, { "r", "L", "L" } },
1137
{ INDEX_op_qemu_ld64, { "r", "r", "L", "L" } },
1139
{ INDEX_op_qemu_st8, { "cb", "L", "L" } },
1140
{ INDEX_op_qemu_st16, { "L", "L", "L" } },
1141
{ INDEX_op_qemu_st32, { "L", "L", "L" } },
1142
{ INDEX_op_qemu_st64, { "L", "L", "L", "L" } },
1147
void tcg_target_init(TCGContext *s)
1150
if ((1 << CPU_TLB_ENTRY_BITS) != sizeof(CPUTLBEntry))
1153
tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xff);
1154
tcg_regset_set32(tcg_target_call_clobber_regs, 0,
1155
(1 << TCG_REG_EAX) |
1156
(1 << TCG_REG_EDX) |
1157
(1 << TCG_REG_ECX));
1159
tcg_regset_clear(s->reserved_regs);
1160
tcg_regset_set_reg(s->reserved_regs, TCG_REG_ESP);
1162
tcg_add_target_add_op_defs(x86_op_defs);