OR_A0, /* temporary register used when doing address evaluation */

static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
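/*
 * Illustrative usage (added sketch, not from the original source): a
 * translator for "mov ax, imm16" could combine these helpers as
 *
 *     gen_op_movl_T0_im(imm);              // T0 = immediate
 *     gen_op_andl_T0_ffff();               // keep the low 16 bits
 *     gen_op_mov_reg_T0(OT_WORD, R_EAX);   // AX = T0
 *
 * The decode logic that drives such a sequence lives elsewhere in this
 * file.
 */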
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

/* Each wrapper picks the REX low-byte register (SPL..DIL) when
   x86_64_hregs is set, and the legacy high-byte register (AH..BH)
   otherwise. */
#define DEF_BREGS(prefixb, prefixh, suffix)             \
                                                        \
static void prefixb ## ESP ## suffix ## _wrapper(void)  \
{                                                       \
    if (x86_64_hregs)                                   \
        prefixb ## ESP ## suffix ();                    \
    else                                                \
        prefixh ## EAX ## suffix ();                    \
}                                                       \
                                                        \
static void prefixb ## EBP ## suffix ## _wrapper(void)  \
{                                                       \
    if (x86_64_hregs)                                   \
        prefixb ## EBP ## suffix ();                    \
    else                                                \
        prefixh ## ECX ## suffix ();                    \
}                                                       \
                                                        \
static void prefixb ## ESI ## suffix ## _wrapper(void)  \
{                                                       \
    if (x86_64_hregs)                                   \
        prefixb ## ESI ## suffix ();                    \
    else                                                \
        prefixh ## EDX ## suffix ();                    \
}                                                       \
                                                        \
static void prefixb ## EDI ## suffix ## _wrapper(void)  \
{                                                       \
    if (x86_64_hregs)                                   \
        prefixb ## EDI ## suffix ();                    \
    else                                                \
        prefixh ## EBX ## suffix ();                    \
}

DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
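/*
 * Added illustration: on a little-endian host these offsets place the
 * x86 partial-register views at the low end of each register slot, e.g.
 *
 *     AL = regs[R_EAX] + REG_B_OFFSET   (byte 0)
 *     AH = regs[R_EAX] + REG_H_OFFSET   (byte 1)
 *     AX = regs[R_EAX] + REG_W_OFFSET   (16-bit view at byte 0)
 *
 * The big-endian case counts back from the top of the slot, which is
 * what the sizeof(target_ulong) arithmetic above computes.
 */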
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}

static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}

static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}
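/*
 * Usage sketch (added): a read-modify-write of BX through the T0
 * temporary looks like
 *
 *     gen_op_mov_TN_reg(OT_WORD, 0, R_EBX);   // T0 = BX
 *     ... operate on cpu_T[0] ...
 *     gen_op_mov_reg_T0(OT_WORD, R_EBX);      // BX = T0, upper bits kept
 */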
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

static inline void gen_op_add_reg_T0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
        tcg_gen_qemu_ld64(t0, a0, mem_index);
        break;
    }
}

/* XXX: always use ldu or lds */
static inline void gen_op_ld_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ld_T1_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}

static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
        tcg_gen_qemu_st64(t0, a0, mem_index);
        break;
    }
}

static inline void gen_op_st_T0_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_st_T1_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}
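/*
 * Added note on the idx encoding: callers pass "ot + s->mem_index", so
 * the low two bits select the operand size (OT_BYTE..OT_QUAD) and
 * (idx >> 2) - 1 recovers the MMU index the access should run under.
 * For example (sketch):
 *
 *     gen_op_ld_T0_A0(OT_WORD + s->mem_index);  // 16-bit load from A0
 */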
static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
gen_op_movb_ESP_T0_wrapper,
gen_op_movb_EBP_T0_wrapper,
gen_op_movb_ESI_T0_wrapper,
gen_op_movb_EDI_T0_wrapper,
DEF_REGS(gen_op_movw_, _T0)
DEF_REGS(gen_op_movl_, _T0)
DEF_REGS(gen_op_movq_, _T0)

static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
gen_op_movb_ESP_T1_wrapper,
gen_op_movb_EBP_T1_wrapper,
gen_op_movb_ESI_T1_wrapper,
gen_op_movb_EDI_T1_wrapper,
DEF_REGS(gen_op_movw_, _T1)
DEF_REGS(gen_op_movl_, _T1)
DEF_REGS(gen_op_movq_, _T1)

static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
DEF_REGS(gen_op_movw_, _A0)
DEF_REGS(gen_op_movl_, _A0)
DEF_REGS(gen_op_movq_, _A0)

static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
gen_op_movl_T0_ESP_wrapper,
gen_op_movl_T0_EBP_wrapper,
gen_op_movl_T0_ESI_wrapper,
gen_op_movl_T0_EDI_wrapper,
gen_op_movl_T1_ESP_wrapper,
gen_op_movl_T1_EBP_wrapper,
gen_op_movl_T1_ESI_wrapper,
gen_op_movl_T1_EDI_wrapper,
DEF_REGS(gen_op_movl_T0_, )
DEF_REGS(gen_op_movl_T1_, )
DEF_REGS(gen_op_movl_T0_, )
DEF_REGS(gen_op_movl_T1_, )
DEF_REGS(gen_op_movl_T0_, )
DEF_REGS(gen_op_movl_T1_, )

static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
DEF_REGS(gen_op_movl_A0_, )

static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
DEF_REGS(gen_op_addl_A0_, )
DEF_REGS(gen_op_addl_A0_, _s1)
DEF_REGS(gen_op_addl_A0_, _s2)
DEF_REGS(gen_op_addl_A0_, _s3)

static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
DEF_REGS(gen_op_movq_A0_, )

static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
DEF_REGS(gen_op_addq_A0_, )
DEF_REGS(gen_op_addq_A0_, _s1)
DEF_REGS(gen_op_addq_A0_, _s2)
DEF_REGS(gen_op_addq_A0_, _s3)

static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
DEF_REGS(gen_op_cmovw_, _T1_T0)
DEF_REGS(gen_op_cmovl_, _T1_T0)
DEF_REGS(gen_op_cmovq_, _T1_T0)

static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {

#define DEF_ARITHC(SUFFIX)\
gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\

static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
#ifndef CONFIG_USER_ONLY

static const int cc_op_arithb[8] = {

#define DEF_CMPXCHG(SUFFIX)\
gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
#ifndef CONFIG_USER_ONLY

#define DEF_SHIFT(SUFFIX)\
gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
gen_op_roll ## SUFFIX ## _T0_T1_cc,\
gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
gen_op_shll ## SUFFIX ## _T0_T1_cc,\
gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
gen_op_shll ## SUFFIX ## _T0_T1_cc,\
gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\

static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
#ifndef CONFIG_USER_ONLY

#define DEF_SHIFTD(SUFFIX, op)\
gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\

static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
#ifndef CONFIG_USER_ONLY
DEF_SHIFTD(_kernel, im)
DEF_SHIFTD(_user, im)

static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
DEF_SHIFTD(_kernel, ECX)
DEF_SHIFTD(_user, ECX)

static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
gen_op_btsw_T0_T1_cc,
gen_op_btrw_T0_T1_cc,
gen_op_btcw_T0_T1_cc,
gen_op_btsl_T0_T1_cc,
gen_op_btrl_T0_T1_cc,
gen_op_btcl_T0_T1_cc,
gen_op_btsq_T0_T1_cc,
gen_op_btrq_T0_T1_cc,
gen_op_btcq_T0_T1_cc,

static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
gen_op_add_bitw_A0_T1,
gen_op_add_bitl_A0_T1,
X86_64_ONLY(gen_op_add_bitq_A0_T1),

static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {

static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
gen_op_ldsb_raw_T0_A0,
gen_op_ldsw_raw_T0_A0,
X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
#ifndef CONFIG_USER_ONLY
gen_op_ldsb_kernel_T0_A0,
gen_op_ldsw_kernel_T0_A0,
X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
gen_op_ldsb_user_T0_A0,
gen_op_ldsw_user_T0_A0,
X86_64_ONLY(gen_op_ldsl_user_T0_A0),

static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
gen_op_ldub_raw_T0_A0,
gen_op_lduw_raw_T0_A0,
#ifndef CONFIG_USER_ONLY
gen_op_ldub_kernel_T0_A0,
gen_op_lduw_kernel_T0_A0,
gen_op_ldub_user_T0_A0,
gen_op_lduw_user_T0_A0,

/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
gen_op_ldub_raw_T0_A0,
gen_op_lduw_raw_T0_A0,
gen_op_ldl_raw_T0_A0,
X86_64_ONLY(gen_op_ldq_raw_T0_A0),
#ifndef CONFIG_USER_ONLY
gen_op_ldub_kernel_T0_A0,
gen_op_lduw_kernel_T0_A0,
gen_op_ldl_kernel_T0_A0,
X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
gen_op_ldub_user_T0_A0,
gen_op_lduw_user_T0_A0,
gen_op_ldl_user_T0_A0,
X86_64_ONLY(gen_op_ldq_user_T0_A0),

static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
gen_op_ldub_raw_T1_A0,
gen_op_lduw_raw_T1_A0,
gen_op_ldl_raw_T1_A0,
X86_64_ONLY(gen_op_ldq_raw_T1_A0),
#ifndef CONFIG_USER_ONLY
gen_op_ldub_kernel_T1_A0,
gen_op_lduw_kernel_T1_A0,
gen_op_ldl_kernel_T1_A0,
X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
gen_op_ldub_user_T1_A0,
gen_op_lduw_user_T1_A0,
gen_op_ldl_user_T1_A0,
X86_64_ONLY(gen_op_ldq_user_T1_A0),

static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
gen_op_stb_raw_T0_A0,
gen_op_stw_raw_T0_A0,
gen_op_stl_raw_T0_A0,
X86_64_ONLY(gen_op_stq_raw_T0_A0),
#ifndef CONFIG_USER_ONLY
gen_op_stb_kernel_T0_A0,
gen_op_stw_kernel_T0_A0,
gen_op_stl_kernel_T0_A0,
X86_64_ONLY(gen_op_stq_kernel_T0_A0),
gen_op_stb_user_T0_A0,
gen_op_stw_user_T0_A0,
gen_op_stl_user_T0_A0,
X86_64_ONLY(gen_op_stq_user_T0_A0),

static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
gen_op_stw_raw_T1_A0,
gen_op_stl_raw_T1_A0,
X86_64_ONLY(gen_op_stq_raw_T1_A0),
#ifndef CONFIG_USER_ONLY
gen_op_stw_kernel_T1_A0,
gen_op_stl_kernel_T1_A0,
X86_64_ONLY(gen_op_stq_kernel_T1_A0),
gen_op_stw_user_T1_A0,
gen_op_stl_user_T1_A0,
X86_64_ONLY(gen_op_stq_user_T1_A0),
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}

if (pc == (uint32_t)pc) {
gen_op_movl_eip_im(pc);
} else if (pc == (int32_t)pc) {
gen_op_movq_eip_im(pc);
gen_op_movq_eip_im64(pc >> 32, pc);
gen_op_movl_eip_im(pc);

static inline void gen_string_movl_A0_ESI(DisasContext *s)

static inline void gen_string_movl_A0_EDI(DisasContext *s)
#ifdef TARGET_X86_64
if (s->aflag == 2) {
gen_op_movq_A0_reg(R_EDI);
gen_op_movq_A0_reg[R_EDI]();
gen_op_movl_A0_seg(R_ES);
gen_op_addl_A0_reg_sN(0, R_EDI);
gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
gen_op_addl_A0_reg_sN[0][R_EDI]();
gen_op_movl_A0_reg(R_EDI);
gen_op_movl_A0_reg[R_EDI]();
gen_op_movl_A0_reg(R_EDI);
gen_op_movl_A0_reg[R_EDI]();
gen_op_andl_A0_ffff();
gen_op_addl_A0_seg(R_ES);
static inline void gen_op_movl_T0_Dshift(int ot)
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
}

static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}

static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8s_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16s_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32s_tl(reg, reg);
        break;
    default:
        break;
    }
}

static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}
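/*
 * Added sketch: a REP-prefixed string instruction brackets one
 * iteration with an ECX test, roughly
 *
 *     l1 = gen_new_label();
 *     gen_op_jz_ecx(s->aflag, l1);            // stop when (E)CX == 0
 *     ... one iteration of the string op ...
 *     gen_op_add_reg_im(s->aflag, R_ECX, -1); // decrement the counter
 *     gen_set_label(l1);
 *
 * Label placement here is illustrative; the real loop also jumps back
 * through the TB machinery so interrupts can be serviced.
 */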
static void *helper_in_func[3] = {
static void *helper_out_func[3] = {
static void *gen_check_io_func[3] = {

static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
target_ulong next_eip;
gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));

static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
gen_op_movl_T0_Dshiftb,
gen_op_movl_T0_Dshiftw,
gen_op_movl_T0_Dshiftl,
X86_64_ONLY(gen_op_movl_T0_Dshiftq),

static GenOpFunc1 *gen_op_jnz_ecx[3] = {
X86_64_ONLY(gen_op_jnz_ecxq),

static GenOpFunc1 *gen_op_jz_ecx[3] = {
X86_64_ONLY(gen_op_jz_ecxq),

static GenOpFunc *gen_op_dec_ECX[3] = {
X86_64_ONLY(gen_op_decq_ECX),

static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
X86_64_ONLY(gen_op_jnz_subq),
X86_64_ONLY(gen_op_jz_subq),

static GenOpFunc *gen_op_in_DX_T0[3] = {
static GenOpFunc *gen_op_out_DX_T0[3] = {
static GenOpFunc *gen_op_in[3] = {
static GenOpFunc *gen_op_out[3] = {
static GenOpFunc *gen_check_io_T0[3] = {
static GenOpFunc *gen_check_io_DX[3] = {

static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
if (s->pe && (s->cpl > s->iopl || s->vm86)) {
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(cur_eip);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
tcg_gen_helper_0_1(gen_check_io_func[ot],
if (s->flags & HF_SVMI_MASK) {
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
svm_flags |= (1 << (4 + ot));
next_eip = s->pc - s->cs_base;
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
tcg_gen_helper_0_3(helper_svm_check_io,
tcg_const_i32(svm_flags),
tcg_const_i32(next_eip - cur_eip));
gen_check_io_DX[ot]();
gen_check_io_T0[ot]();
static inline void gen_movs(DisasContext *s, int ot)
gen_string_movl_A0_ESI(s);
gen_op_ld_T0_A0(ot + s->mem_index);
gen_op_ld_T0_A0[ot + s->mem_index]();
gen_string_movl_A0_EDI(s);
gen_op_st_T0_A0(ot + s->mem_index);
gen_op_movl_T0_Dshift(ot);
gen_op_add_reg_T0(s->aflag, R_ESI);
gen_op_add_reg_T0(s->aflag, R_EDI);
gen_op_st_T0_A0[ot + s->mem_index]();
gen_op_movl_T0_Dshift[ot]();
gen_op_addq_ESI_T0();
gen_op_addq_EDI_T0();
gen_op_addl_ESI_T0();
gen_op_addl_EDI_T0();
gen_op_addw_ESI_T0();
gen_op_addw_EDI_T0();
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
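/*
 * Added note: cc_table is indexed by the current cc_op and each CCTable
 * entry carries compute_all/compute_c function pointers, so EFLAGS is
 * only materialized when something consumes it. After an "add", for
 * instance, cc_op is CC_OP_ADDB + ot and the flags are reconstructed
 * from cc_src/cc_dst on demand by the indirect calls generated above.
 */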
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}
/* return true if setcc_slow is not needed (WARNING: must be kept in
   sync with gen_jcc1) */
static int is_fast_jcc_case(DisasContext *s, int b)
jcc_op = (b >> 1) & 7;
/* we optimize the cmp/jcc case */
if (jcc_op == JCC_O || jcc_op == JCC_P)
/* some jumps are easy to compute */
if (jcc_op != JCC_Z && jcc_op != JCC_S)

/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
int inv, jcc_op, size, cond;
jcc_op = (b >> 1) & 7;
/* we optimize the cmp/jcc case */
size = cc_op - CC_OP_SUBB;
tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
#ifdef TARGET_X86_64
tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
#ifdef TARGET_X86_64
tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
cond = inv ? TCG_COND_GE : TCG_COND_LT;
cond = inv ? TCG_COND_GT : TCG_COND_LE;
tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
tcg_gen_ext8s_tl(t0, cpu_cc_src);
tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
tcg_gen_ext16s_tl(t0, cpu_cc_src);
#ifdef TARGET_X86_64
tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
tcg_gen_ext32s_tl(t0, cpu_cc_src);
tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
/* some jumps are easy to compute */
size = (cc_op - CC_OP_ADDB) & 3;
size = (cc_op - CC_OP_ADDB) & 3;
gen_setcc_slow_T0(s, jcc_op);
tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)

static void *helper_fp_arith_ST0_FT0[8] = {
helper_fadd_ST0_FT0,
helper_fmul_ST0_FT0,
helper_fcom_ST0_FT0,
helper_fcom_ST0_FT0,
helper_fsub_ST0_FT0,
helper_fsubr_ST0_FT0,
helper_fdiv_ST0_FT0,
helper_fdivr_ST0_FT0,
};
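/*
 * Added note (interpretation): the index into this table comes from the
 * FPU opcode's reg field. fcom appears twice because entries 2 and 3
 * cover fcom/fcomp, whose comparison is identical; the stack pop of
 * fcomp is generated separately by the caller.
 */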
static GenOpFunc1 *gen_jcc_sub[4][8] = {
#ifdef TARGET_X86_64
BUGGY_64(gen_op_jb_subq),
BUGGY_64(gen_op_jbe_subq),
BUGGY_64(gen_op_jl_subq),
BUGGY_64(gen_op_jle_subq),

static GenOpFunc1 *gen_op_loop[3][4] = {
#ifdef TARGET_X86_64

static GenOpFunc *gen_setcc_slow[8] = {

static GenOpFunc *gen_setcc_sub[4][8] = {
gen_op_setb_T0_subb,
gen_op_setz_T0_subb,
gen_op_setbe_T0_subb,
gen_op_sets_T0_subb,
gen_op_setl_T0_subb,
gen_op_setle_T0_subb,
gen_op_setb_T0_subw,
gen_op_setz_T0_subw,
gen_op_setbe_T0_subw,
gen_op_sets_T0_subw,
gen_op_setl_T0_subw,
gen_op_setle_T0_subw,
gen_op_setb_T0_subl,
gen_op_setz_T0_subl,
gen_op_setbe_T0_subl,
gen_op_sets_T0_subl,
gen_op_setl_T0_subl,
gen_op_setle_T0_subl,
#ifdef TARGET_X86_64
gen_op_setb_T0_subq,
gen_op_setz_T0_subq,
gen_op_setbe_T0_subq,
gen_op_sets_T0_subq,
gen_op_setl_T0_subq,
gen_op_setle_T0_subq,

static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
gen_op_fadd_ST0_FT0,
gen_op_fmul_ST0_FT0,
gen_op_fcom_ST0_FT0,
gen_op_fcom_ST0_FT0,
gen_op_fsub_ST0_FT0,
gen_op_fsubr_ST0_FT0,
gen_op_fdiv_ST0_FT0,
gen_op_fdivr_ST0_FT0,
/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
helper_fadd_STN_ST0,
helper_fmul_STN_ST0,
helper_fsubr_STN_ST0,
helper_fsub_STN_ST0,
helper_fdivr_STN_ST0,
helper_fdiv_STN_ST0,

static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
gen_op_fadd_STN_ST0,
gen_op_fmul_STN_ST0,
gen_op_fsubr_STN_ST0,
gen_op_fsub_STN_ST0,
gen_op_fdivr_STN_ST0,
gen_op_fdiv_STN_ST0,
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
GenOpFunc *gen_update_cc;
if (d != OR_TMP0) {
gen_op_mov_TN_reg(ot, 0, d);
gen_op_mov_TN_reg[ot][0][d]();
gen_op_ld_T0_A0(ot + s1->mem_index);
gen_op_ld_T0_A0[ot + s1->mem_index]();
if (s1->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s1->cc_op);
gen_compute_eflags_c(cpu_tmp4);
tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
gen_op_mov_reg_T0(ot, d);
gen_op_st_T0_A0(ot + s1->mem_index);
tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
s1->cc_op = CC_OP_DYNAMIC;
if (s1->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s1->cc_op);
gen_compute_eflags_c(cpu_tmp4);
tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
gen_op_mov_reg_T0(ot, d);
gen_op_st_T0_A0(ot + s1->mem_index);
tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
gen_op_mov_reg_T0[ot][d]();
gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
s1->cc_op = CC_OP_DYNAMIC;
gen_op_addl_T0_T1();
gen_op_mov_reg_T0(ot, d);
gen_op_st_T0_A0(ot + s1->mem_index);
gen_op_update2_cc();
s1->cc_op = CC_OP_ADDB + ot;
gen_update_cc = gen_op_update2_cc;
tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
gen_op_mov_reg_T0(ot, d);
gen_op_st_T0_A0(ot + s1->mem_index);
gen_op_update2_cc();
gen_op_subl_T0_T1();
s1->cc_op = CC_OP_SUBB + ot;
gen_update_cc = gen_op_update2_cc;
tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
gen_op_mov_reg_T0(ot, d);
gen_op_st_T0_A0(ot + s1->mem_index);
gen_op_update1_cc();
s1->cc_op = CC_OP_LOGICB + ot;
tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
gen_op_mov_reg_T0(ot, d);
gen_op_st_T0_A0(ot + s1->mem_index);
gen_op_update1_cc();
s1->cc_op = CC_OP_LOGICB + ot;
tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
gen_op_mov_reg_T0(ot, d);
gen_op_st_T0_A0(ot + s1->mem_index);
gen_op_update1_cc();
gen_op_arith_T0_T1_cc[op]();
s1->cc_op = CC_OP_LOGICB + ot;
gen_update_cc = gen_op_update1_cc;
gen_op_cmpl_T0_T1_cc();
s1->cc_op = CC_OP_SUBB + ot;
gen_update_cc = NULL;
if (op != OP_CMPL) {
gen_op_mov_reg_T0[ot][d]();
gen_op_st_T0_A0[ot + s1->mem_index]();
/* the flags update must happen after the memory write (precise
   exception support) */

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
if (d != OR_TMP0)
gen_op_mov_TN_reg(ot, 0, d);
gen_op_mov_TN_reg[ot][0][d]();
gen_op_ld_T0_A0(ot + s1->mem_index);
gen_op_ld_T0_A0[ot + s1->mem_index]();
if (s1->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s1->cc_op);
tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
s1->cc_op = CC_OP_INCB + ot;
tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
s1->cc_op = CC_OP_DECB + ot;
if (d != OR_TMP0)
gen_op_mov_reg_T0(ot, d);
gen_op_st_T0_A0(ot + s1->mem_index);
gen_compute_eflags_c(cpu_cc_src);
tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
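/*
 * Added note: INC/DEC must leave CF untouched, so the carry is read
 * back from the *previous* cc_op (gen_compute_eflags_c into cpu_cc_src
 * above, while the old cc_op value is still live at runtime) and
 * cc_dst records the result for the CC_OP_INCB/CC_OP_DECB + ot case.
 */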
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
gen_op_ld_T0_A0(ot + s->mem_index);
gen_op_mov_TN_reg(ot, 0, op1);
tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
gen_exts(ot, cpu_T[0]);
tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
gen_extu(ot, cpu_T[0]);
tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
gen_op_st_T0_A0(ot + s->mem_index);
gen_op_mov_reg_T0(ot, op1);
/* update eflags if non zero shift */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
/* XXX: inefficient */
t0 = tcg_temp_local_new(TCG_TYPE_TL);
t1 = tcg_temp_local_new(TCG_TYPE_TL);
tcg_gen_mov_tl(t0, cpu_T[0]);
tcg_gen_mov_tl(t1, cpu_T3);
shift_label = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);
tcg_gen_mov_tl(cpu_cc_src, t1);
tcg_gen_mov_tl(cpu_cc_dst, t0);
tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
gen_set_label(shift_label);
s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
gen_op_ld_T0_A0(ot + s->mem_index);
gen_op_mov_TN_reg(ot, 0, op1);
gen_exts(ot, cpu_T[0]);
tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
gen_extu(ot, cpu_T[0]);
tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
gen_op_st_T0_A0(ot + s->mem_index);
gen_op_mov_reg_T0(ot, op1);
/* update eflags if non zero shift */
tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
s->cc_op = CC_OP_SARB + ot;
s->cc_op = CC_OP_SHLB + ot;

static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}
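/*
 * Example (added): tcg_gen_lshift(t, t, 3) shifts left by three bits,
 * tcg_gen_lshift(t, t, -5) shifts right by five. The signed count lets
 * the rotate code below compute "11 - (data_bits - 1)" and get either
 * direction from a single call.
 */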
/* XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
int label1, label2, data_bits;
TCGv t0, t1, t2, a0;
/* XXX: inefficient, but we must use local temps */
t0 = tcg_temp_local_new(TCG_TYPE_TL);
t1 = tcg_temp_local_new(TCG_TYPE_TL);
t2 = tcg_temp_local_new(TCG_TYPE_TL);
a0 = tcg_temp_local_new(TCG_TYPE_TL);
if (op1 == OR_TMP0) {
tcg_gen_mov_tl(a0, cpu_A0);
gen_op_ld_v(ot + s->mem_index, t0, a0);
gen_op_mov_v_reg(ot, t0, op1);
tcg_gen_mov_tl(t1, cpu_T[1]);
tcg_gen_andi_tl(t1, t1, mask);
/* Must test zero case to avoid using undefined behaviour in TCG
   shifts. */
label1 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);
tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
tcg_gen_mov_tl(cpu_tmp0, t1);
tcg_gen_mov_tl(t2, t0);
data_bits = 8 << ot;
/* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
   fix TCG definition) */
tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
tcg_gen_shl_tl(t0, t0, cpu_tmp0);
tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
tcg_gen_shr_tl(t0, t0, cpu_tmp0);
tcg_gen_or_tl(t0, t0, cpu_tmp4);
gen_set_label(label1);
if (op1 == OR_TMP0) {
gen_op_st_v(ot + s->mem_index, t0, a0);
gen_op_mov_reg_v(ot, op1, t0);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
label2 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);
gen_compute_eflags(cpu_cc_src);
tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
tcg_gen_xor_tl(cpu_tmp0, t2, t0);
tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
tcg_gen_shri_tl(t0, t0, data_bits - 1);
tcg_gen_andi_tl(t0, t0, CC_C);
tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
tcg_gen_discard_tl(cpu_cc_dst);
tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
gen_set_label(label2);
s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

static void *helper_rotc[8] = {
X86_64_ONLY(helper_rclq),
X86_64_ONLY(helper_rcrq),

/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_op_ld_T0_A0(ot + s->mem_index);
gen_op_mov_TN_reg(ot, 0, op1);
tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
cpu_T[0], cpu_T[0], cpu_T[1]);
gen_op_st_T0_A0(ot + s->mem_index);
gen_op_mov_reg_T0(ot, op1);
label1 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
tcg_gen_discard_tl(cpu_cc_dst);
tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
gen_set_label(label1);
s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

/* XXX: add faster immediate case */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
int label1, label2, data_bits;
TCGv t0, t1, t2, a0;
t0 = tcg_temp_local_new(TCG_TYPE_TL);
t1 = tcg_temp_local_new(TCG_TYPE_TL);
t2 = tcg_temp_local_new(TCG_TYPE_TL);
a0 = tcg_temp_local_new(TCG_TYPE_TL);
if (op1 == OR_TMP0) {
tcg_gen_mov_tl(a0, cpu_A0);
gen_op_ld_v(ot + s->mem_index, t0, a0);
gen_op_mov_v_reg(ot, t0, op1);
tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
tcg_gen_mov_tl(t1, cpu_T[1]);
tcg_gen_mov_tl(t2, cpu_T3);
/* Must test zero case to avoid using undefined behaviour in TCG
   shifts. */
label1 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
tcg_gen_addi_tl(cpu_tmp5, t2, -1);
if (ot == OT_WORD) {
/* Note: we implement the Intel behaviour for shift count > 16 */
tcg_gen_andi_tl(t0, t0, 0xffff);
tcg_gen_shli_tl(cpu_tmp0, t1, 16);
tcg_gen_or_tl(t0, t0, cpu_tmp0);
tcg_gen_ext32u_tl(t0, t0);
tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
/* only needed if count > 16, but a test would complicate the code */
tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
tcg_gen_shr_tl(t0, t0, t2);
tcg_gen_or_tl(t0, t0, cpu_tmp0);
/* XXX: not optimal */
tcg_gen_andi_tl(t0, t0, 0xffff);
tcg_gen_shli_tl(t1, t1, 16);
tcg_gen_or_tl(t1, t1, t0);
tcg_gen_ext32u_tl(t1, t1);
tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
tcg_gen_shl_tl(t0, t0, t2);
tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
tcg_gen_shr_tl(t1, t1, cpu_tmp5);
tcg_gen_or_tl(t0, t0, t1);
data_bits = 8 << ot;
tcg_gen_ext32u_tl(t0, t0);
tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
tcg_gen_shr_tl(t0, t0, t2);
tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
tcg_gen_shl_tl(t1, t1, cpu_tmp5);
tcg_gen_or_tl(t0, t0, t1);
tcg_gen_ext32u_tl(t1, t1);
tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
tcg_gen_shl_tl(t0, t0, t2);
tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
tcg_gen_shr_tl(t1, t1, cpu_tmp5);
tcg_gen_or_tl(t0, t0, t1);
tcg_gen_mov_tl(t1, cpu_tmp4);
gen_set_label(label1);
if (op1 == OR_TMP0) {
gen_op_st_v(ot + s->mem_index, t0, a0);
gen_op_mov_reg_v(ot, op1, t0);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
label2 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
tcg_gen_mov_tl(cpu_cc_src, t1);
tcg_gen_mov_tl(cpu_cc_dst, t0);
tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
gen_set_label(label2);
s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
gen_op_mov_reg_T0[ot][d]();
gen_op_st_T0_A0[ot + s1->mem_index]();
gen_op_update_inc_cc();

static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
gen_op_mov_TN_reg[ot][0][d]();
gen_op_ld_T0_A0[ot + s1->mem_index]();
if (s != OR_TMP1)
gen_op_mov_TN_reg(ot, 1, s);
gen_rot_rm_T1(s1, ot, d, 0);
gen_rot_rm_T1(s1, ot, d, 1);
gen_shift_rm_T1(s1, ot, d, 0, 0);
gen_shift_rm_T1(s1, ot, d, 1, 0);
gen_shift_rm_T1(s1, ot, d, 1, 1);
gen_rotc_rm_T1(s1, ot, d, 0);
gen_rotc_rm_T1(s1, ot, d, 1);
gen_op_mov_TN_reg[ot][1][s]();
/* for zero counts, flags are not updated, so must do it dynamically */
if (s1->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s1->cc_op);
gen_op_shift_T0_T1_cc[ot][op]();
gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
gen_op_mov_reg_T0[ot][d]();
s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
gen_shift_rm_im(s1, ot, d, c, 0, 0);
gen_shift_rm_im(s1, ot, d, c, 1, 0);
gen_shift_rm_im(s1, ot, d, c, 1, 1);
/* currently not optimized */
gen_op_movl_T1_im(c);
gen_shift(s1, op, ot, d, OR_TMP1);
/* currently not optimized */
gen_op_movl_T1_im(c);
gen_shift(s1, op, ot, d, OR_TMP1);

static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
gen_jmp_tb(s, eip, 0);

static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}

static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}

static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}

static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}

static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}

static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}

static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}

static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
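/*
 * Added illustration: SSE_FOP(add) expands to
 *
 *     { helper_addps, helper_addpd, helper_addss, helper_addsd, }
 *
 * i.e. one handler per prefix column (none/0x66/0xF3/0xF2), matching
 * the b1 index computed from the prefixes at the top of gen_sse().
 */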
static void *sse_op_table1[256][4] = {
/* 3DNow! extensions */
[0x0e] = { SSE_DUMMY }, /* femms */
[0x0f] = { SSE_DUMMY }, /* pf... */

static void gen_movtl_T0_im(target_ulong val)
#ifdef TARGET_X86_64
if ((int32_t)val == val) {
gen_op_movl_T0_im(val);
gen_op_movq_T0_im64(val >> 32, val);
gen_op_movl_T0_im(val);

static void gen_movtl_T1_im(target_ulong val)
#ifdef TARGET_X86_64
if ((int32_t)val == val) {
gen_op_movl_T1_im(val);
gen_op_movq_T1_im64(val >> 32, val);
gen_op_movl_T1_im(val);

static void gen_add_A0_im(DisasContext *s, int val)
#ifdef TARGET_X86_64
gen_op_addq_A0_im(val);
gen_op_addl_A0_im(val);

static GenOpFunc1 *gen_ldq_env_A0[3] = {
gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
gen_op_ldq_kernel_env_A0,
gen_op_ldq_user_env_A0,

static GenOpFunc1 *gen_stq_env_A0[3] = {
gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
gen_op_stq_kernel_env_A0,
gen_op_stq_user_env_A0,

static GenOpFunc1 *gen_ldo_env_A0[3] = {
gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
gen_op_ldo_kernel_env_A0,
gen_op_ldo_user_env_A0,

static GenOpFunc1 *gen_sto_env_A0[3] = {
gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
gen_op_sto_kernel_env_A0,
gen_op_sto_user_env_A0,

#define SSE_SPECIAL ((GenOpFunc2 *)1)
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }

static GenOpFunc2 *sse_op_table1[256][4] = {
/* pure SSE operations */
[0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
[0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
[0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
[0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
[0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
[0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
[0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
[0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
[0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
[0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
SSE_FOP(cmpnle),
SSE_FOP(cmpord),

static void *sse_op_table5[256] = {
[0x0c] = helper_pi2fw,
[0x0d] = helper_pi2fd,
[0x1c] = helper_pf2iw,
[0x1d] = helper_pf2id,
[0x8a] = helper_pfnacc,
[0x8e] = helper_pfpnacc,
[0x90] = helper_pfcmpge,
[0x94] = helper_pfmin,
[0x96] = helper_pfrcp,
[0x97] = helper_pfrsqrt,
[0x9a] = helper_pfsub,
[0x9e] = helper_pfadd,
[0xa0] = helper_pfcmpgt,
[0xa4] = helper_pfmax,
[0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
[0xa7] = helper_movq, /* pfrsqit1 */
[0xaa] = helper_pfsubr,
[0xae] = helper_pfacc,
[0xb0] = helper_pfcmpeq,
[0xb4] = helper_pfmul,
[0xb6] = helper_movq, /* pfrcpit2 */
[0xb7] = helper_pmulhrw_mmx,
[0xbb] = helper_pswapd,
[0xbf] = helper_pavgb_mmx /* pavgusb */

struct sse_op_helper_s {
void *op[2]; uint32_t ext_mask;

#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }

static struct sse_op_helper_s sse_op_table6[256] = {
[0x00] = SSSE3_OP(pshufb),
[0x01] = SSSE3_OP(phaddw),
[0x02] = SSSE3_OP(phaddd),
[0x03] = SSSE3_OP(phaddsw),
[0x04] = SSSE3_OP(pmaddubsw),
[0x05] = SSSE3_OP(phsubw),
[0x06] = SSSE3_OP(phsubd),
[0x07] = SSSE3_OP(phsubsw),
[0x08] = SSSE3_OP(psignb),
[0x09] = SSSE3_OP(psignw),
[0x0a] = SSSE3_OP(psignd),
[0x0b] = SSSE3_OP(pmulhrsw),
[0x10] = SSE41_OP(pblendvb),
[0x14] = SSE41_OP(blendvps),
[0x15] = SSE41_OP(blendvpd),
[0x17] = SSE41_OP(ptest),
[0x1c] = SSSE3_OP(pabsb),
[0x1d] = SSSE3_OP(pabsw),
[0x1e] = SSSE3_OP(pabsd),
[0x20] = SSE41_OP(pmovsxbw),
[0x21] = SSE41_OP(pmovsxbd),
[0x22] = SSE41_OP(pmovsxbq),
[0x23] = SSE41_OP(pmovsxwd),
[0x24] = SSE41_OP(pmovsxwq),
[0x25] = SSE41_OP(pmovsxdq),
[0x28] = SSE41_OP(pmuldq),
[0x29] = SSE41_OP(pcmpeqq),
[0x2a] = SSE41_SPECIAL, /* movntqda */
[0x2b] = SSE41_OP(packusdw),
[0x30] = SSE41_OP(pmovzxbw),
[0x31] = SSE41_OP(pmovzxbd),
[0x32] = SSE41_OP(pmovzxbq),
[0x33] = SSE41_OP(pmovzxwd),
[0x34] = SSE41_OP(pmovzxwq),
[0x35] = SSE41_OP(pmovzxdq),
[0x37] = SSE42_OP(pcmpgtq),
[0x38] = SSE41_OP(pminsb),
[0x39] = SSE41_OP(pminsd),
[0x3a] = SSE41_OP(pminuw),
[0x3b] = SSE41_OP(pminud),
[0x3c] = SSE41_OP(pmaxsb),
[0x3d] = SSE41_OP(pmaxsd),
[0x3e] = SSE41_OP(pmaxuw),
[0x3f] = SSE41_OP(pmaxud),
[0x40] = SSE41_OP(pmulld),
[0x41] = SSE41_OP(phminposuw),

static struct sse_op_helper_s sse_op_table7[256] = {
[0x08] = SSE41_OP(roundps),
[0x09] = SSE41_OP(roundpd),
[0x0a] = SSE41_OP(roundss),
[0x0b] = SSE41_OP(roundsd),
[0x0c] = SSE41_OP(blendps),
[0x0d] = SSE41_OP(blendpd),
[0x0e] = SSE41_OP(pblendw),
[0x0f] = SSSE3_OP(palignr),
[0x14] = SSE41_SPECIAL, /* pextrb */
[0x15] = SSE41_SPECIAL, /* pextrw */
[0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
[0x17] = SSE41_SPECIAL, /* extractps */
[0x20] = SSE41_SPECIAL, /* pinsrb */
[0x21] = SSE41_SPECIAL, /* insertps */
[0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
[0x40] = SSE41_OP(dpps),
[0x41] = SSE41_OP(dppd),
[0x42] = SSE41_OP(mpsadbw),
[0x60] = SSE42_OP(pcmpestrm),
[0x61] = SSE42_OP(pcmpestri),
[0x62] = SSE42_OP(pcmpistrm),
[0x63] = SSE42_OP(pcmpistri),
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
int b1, op1_offset, op2_offset, is_xmm, val, ot;
int modrm, mod, rm, reg, reg_addr, offset_addr;
GenOpFunc2 *sse_op2;
GenOpFunc3 *sse_op3;
if (s->prefix & PREFIX_DATA)
else if (s->prefix & PREFIX_REPZ)
else if (s->prefix & PREFIX_REPNZ)
sse_op2 = sse_op_table1[b][b1];
goto illegal_op;
if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
goto illegal_op;
rm = (modrm & 7) | REX_B(s);
tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
rm = (modrm & 7);
tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
reg = ((modrm >> 3) & 7) | rex_r;
gen_op_mov_reg_T0(OT_LONG, reg);
if (s->prefix & PREFIX_REPNZ)
modrm = ldub_code(s->pc++);
reg = ((modrm >> 3) & 7) | rex_r;
mod = (modrm >> 6) & 3;
sse_op2 = sse_op_table6[b].op[b1];
if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
op2_offset = offsetof(CPUX86State,xmm_t0);
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
gen_ldq_env_A0(s->mem_index, op2_offset +
offsetof(XMMReg, XMM_Q(0)));
case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
(s->mem_index >> 2) - 1);
tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
offsetof(XMMReg, XMM_L(0)));
case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
(s->mem_index >> 2) - 1);
tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
offsetof(XMMReg, XMM_W(0)));
case 0x2a: /* movntqda */
gen_ldo_env_A0(s->mem_index, op1_offset);
gen_ldo_env_A0(s->mem_index, op2_offset);
op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
op2_offset = offsetof(CPUX86State,mmx_t0);
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
gen_ldq_env_A0(s->mem_index, op2_offset);
if (sse_op2 == SSE_SPECIAL)
tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
s->cc_op = CC_OP_EFLAGS;
case 0x338: /* crc32 */
modrm = ldub_code(s->pc++);
reg = ((modrm >> 3) & 7) | rex_r;
if (b != 0xf0 && b != 0xf1)
if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
else if (b == 0xf1 && s->dflag != 2)
if (s->prefix & PREFIX_DATA)
gen_op_mov_TN_reg(OT_LONG, 0, reg);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
cpu_T[0], tcg_const_i32(8 << ot));
ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
gen_op_mov_reg_T0(ot, reg);
modrm = ldub_code(s->pc++);
reg = ((modrm >> 3) & 7) | rex_r;
mod = (modrm >> 6) & 3;
sse_op2 = sse_op_table7[b].op[b1];
if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
if (sse_op2 == SSE_SPECIAL) {
ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
rm = (modrm & 7) | REX_B(s);
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3690
reg = ((modrm >> 3) & 7) | rex_r;
3691
val = ldub_code(s->pc++);
3693
case 0x14: /* pextrb */
3694
tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3695
xmm_regs[reg].XMM_B(val & 15)));
3697
gen_op_mov_reg_T0(ot, rm);
3699
tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
3700
(s->mem_index >> 2) - 1);
3702
case 0x15: /* pextrw */
3703
tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3704
xmm_regs[reg].XMM_W(val & 7)));
3706
gen_op_mov_reg_T0(ot, rm);
3708
tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
3709
(s->mem_index >> 2) - 1);
3712
if (ot == OT_LONG) { /* pextrd */
3713
tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
3714
offsetof(CPUX86State,
3715
xmm_regs[reg].XMM_L(val & 3)));
3717
gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
3719
tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
3720
(s->mem_index >> 2) - 1);
3721
} else { /* pextrq */
3722
tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3723
offsetof(CPUX86State,
3724
xmm_regs[reg].XMM_Q(val & 1)));
3726
gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
3728
tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
3729
(s->mem_index >> 2) - 1);
3732
case 0x17: /* extractps */
3733
tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3734
xmm_regs[reg].XMM_L(val & 3)));
3736
gen_op_mov_reg_T0(ot, rm);
3738
tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
3739
(s->mem_index >> 2) - 1);
3741
case 0x20: /* pinsrb */
3743
gen_op_mov_TN_reg(OT_LONG, 0, rm);
3745
tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
3746
(s->mem_index >> 2) - 1);
3747
tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3748
xmm_regs[reg].XMM_B(val & 15)));
3750
case 0x21: /* insertps */
3752
tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
3753
offsetof(CPUX86State,xmm_regs[rm]
3754
.XMM_L((val >> 6) & 3)));
3756
tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
3757
(s->mem_index >> 2) - 1);
3758
tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
3759
offsetof(CPUX86State,xmm_regs[reg]
3760
.XMM_L((val >> 4) & 3)));
3762
tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3763
cpu_env, offsetof(CPUX86State,
3764
xmm_regs[reg].XMM_L(0)));
3766
tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3767
cpu_env, offsetof(CPUX86State,
3768
xmm_regs[reg].XMM_L(1)));
3770
tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3771
cpu_env, offsetof(CPUX86State,
3772
xmm_regs[reg].XMM_L(2)));
3774
tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3775
cpu_env, offsetof(CPUX86State,
3776
xmm_regs[reg].XMM_L(3)));
3779
if (ot == OT_LONG) { /* pinsrd */
3781
gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
3783
tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
3784
(s->mem_index >> 2) - 1);
3785
tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
3786
offsetof(CPUX86State,
3787
xmm_regs[reg].XMM_L(val & 3)));
3788
} else { /* pinsrq */
3790
gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
3792
tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
3793
(s->mem_index >> 2) - 1);
3794
tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3795
offsetof(CPUX86State,
3796
xmm_regs[reg].XMM_Q(val & 1)));
3804
op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3806
op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
3808
op2_offset = offsetof(CPUX86State,xmm_t0);
3809
gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3810
gen_ldo_env_A0(s->mem_index, op2_offset);
3813
op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3815
op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3817
op2_offset = offsetof(CPUX86State,mmx_t0);
3818
gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3819
gen_ldq_env_A0(s->mem_index, op2_offset);
3822
val = ldub_code(s->pc++);
3824
if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
3825
s->cc_op = CC_OP_EFLAGS;
3828
/* The helper must use entire 64-bit gp registers */
3832
tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3833
tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3834
tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3052
gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
3054
reg = ((modrm >> 3) & 7) | rex_r;
3055
gen_op_mov_reg_T0[OT_LONG][reg]();
3837
3058
goto illegal_op;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_TN_reg(ot, 1, rm);
            gen_op_addl_T0_T1();
            gen_op_mov_reg_T1(ot, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_op_addl_T0_T1();
            gen_op_st_T0_A0(ot + s->mem_index);
            gen_op_mov_reg_T1(ot, reg);
        }
        gen_op_update2_cc();
        s->cc_op = CC_OP_ADDB + ot;
        break;
3774
case 0x1b1: /* cmpxchg Ev, Gv */
4725
TCGv t0, t1, t2, a0;
4730
ot = dflag + OT_WORD;
4731
modrm = ldub_code(s->pc++);
4732
reg = ((modrm >> 3) & 7) | rex_r;
4733
mod = (modrm >> 6) & 3;
4734
t0 = tcg_temp_local_new(TCG_TYPE_TL);
4735
t1 = tcg_temp_local_new(TCG_TYPE_TL);
4736
t2 = tcg_temp_local_new(TCG_TYPE_TL);
4737
a0 = tcg_temp_local_new(TCG_TYPE_TL);
4738
gen_op_mov_v_reg(ot, t1, reg);
4740
rm = (modrm & 7) | REX_B(s);
4741
gen_op_mov_v_reg(ot, t0, rm);
4743
gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
4744
tcg_gen_mov_tl(a0, cpu_A0);
4745
gen_op_ld_v(ot + s->mem_index, t0, a0);
4746
rm = 0; /* avoid warning */
4748
label1 = gen_new_label();
4749
tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
4750
tcg_gen_sub_tl(t2, t2, t0);
4752
tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
4754
label2 = gen_new_label();
4755
gen_op_mov_reg_v(ot, R_EAX, t0);
4757
gen_set_label(label1);
4758
gen_op_mov_reg_v(ot, rm, t1);
4759
gen_set_label(label2);
4761
tcg_gen_mov_tl(t1, t0);
4762
gen_op_mov_reg_v(ot, R_EAX, t0);
4763
gen_set_label(label1);
4765
gen_op_st_v(ot + s->mem_index, t1, a0);
4767
tcg_gen_mov_tl(cpu_cc_src, t0);
4768
tcg_gen_mov_tl(cpu_cc_dst, t2);
4769
s->cc_op = CC_OP_SUBB + ot;
3778
ot = dflag + OT_WORD;
3779
modrm = ldub_code(s->pc++);
3780
reg = ((modrm >> 3) & 7) | rex_r;
3781
mod = (modrm >> 6) & 3;
3782
gen_op_mov_TN_reg[ot][1][reg]();
3784
rm = (modrm & 7) | REX_B(s);
3785
gen_op_mov_TN_reg[ot][0][rm]();
3786
gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3787
gen_op_mov_reg_T0[ot][rm]();
3789
gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
3790
gen_op_ld_T0_A0[ot + s->mem_index]();
3791
gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3793
s->cc_op = CC_OP_SUBB + ot;
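
/* Reference sketch (illustrative only) of the architectural behaviour
   the labels and branches above implement for cmpxchg: the accumulator
   is compared with the destination; on a match the source is stored,
   otherwise the old destination value is loaded into the accumulator.
   The flags are those of the subtraction, hence CC_OP_SUBB + ot.  In
   the memory form above the store happens on both paths, which is why
   t1 is overwritten with t0 before the unconditional gen_op_st_v. */
static inline uint32_t cmpxchg_model(uint32_t *dest, uint32_t *acc,
                                     uint32_t src)
{
    uint32_t old = *dest;

    if (*acc == old)
        *dest = src;   /* ZF set */
    else
        *acc = old;    /* ZF clear */
    return old;
}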
    case 0x1c7: /* cmpxchg8b */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
            goto illegal_op;
#ifdef TARGET_X86_64
        if (dflag == 2) {
            if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
                goto illegal_op;
            gen_jmp_im(pc_start - s->cs_base);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
        } else
#endif
        {
            if (!(s->cpuid_features & CPUID_CX8))
                goto illegal_op;
            gen_jmp_im(pc_start - s->cs_base);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
        }
        s->cc_op = CC_OP_EFLAGS;
        break;
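
/* Reference sketch (illustrative only) of what helper_cmpxchg8b models:
   EDX:EAX is compared with the 8-byte memory operand; on a match
   ECX:EBX is stored, otherwise the old value is loaded into EDX:EAX.
   Only ZF is defined by the comparison, hence CC_OP_EFLAGS above. */
static inline int cmpxchg8b_model(uint64_t *mem, uint32_t *edx,
                                  uint32_t *eax, uint32_t ecx, uint32_t ebx)
{
    uint64_t cmp = ((uint64_t)*edx << 32) | *eax;

    if (*mem == cmp) {
        *mem = ((uint64_t)ecx << 32) | ebx;
        return 1;               /* ZF = 1 */
    }
    *eax = (uint32_t)*mem;
    *edx = (uint32_t)(*mem >> 32);
    return 0;                   /* ZF = 0 */
}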

        /**************************/
        /* push/pop */
    case 0x50 ... 0x57: /* push */
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
        gen_push_T0(s);
        break;
    case 0x58 ... 0x5f: /* pop */
            switch(op >> 4) {
            case 0:
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
                break;
            case 1:
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
                break;
            case 2:
                tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                  (s->mem_index >> 2) - 1);
                tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
                break;
            case 3:
            default:
                gen_op_lds_T0_A0(OT_WORD + s->mem_index);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
                break;
            }
            break;
        case 1:
            /* XXX: the corresponding CPUID bit must be tested ! */
            switch(op >> 4) {
            case 1:
                tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
                break;
            case 2:
                tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
                tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                  (s->mem_index >> 2) - 1);
                break;
            case 3:
            default:
                tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                break;
            }
            tcg_gen_helper_0_0(helper_fpop);
            break;
        default:
            switch(op >> 4) {
            case 0:
                tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
                break;
            case 1:
                tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
                break;
            case 2:
                tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
                tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                  (s->mem_index >> 2) - 1);
                break;
            case 3:
            default:
                tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                break;
            }
            if ((op & 7) == 3)
                tcg_gen_helper_0_0(helper_fpop);
            break;
        }
        break;
        case 0x0c: /* fldenv mem */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fldenv,
                               cpu_A0, tcg_const_i32(s->dflag));
            break;
        case 0x0d: /* fldcw mem */
            gen_op_ld_T0_A0(OT_WORD + s->mem_index);
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
            tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
            break;
        case 0x0e: /* fnstenv mem */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fstenv,
                               cpu_A0, tcg_const_i32(s->dflag));
            break;
        case 0x0f: /* fnstcw mem */
            tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
            break;
        case 0x1d: /* fldt mem */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
            break;
        case 0x1f: /* fstpt mem */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
            tcg_gen_helper_0_0(helper_fpop);
            break;
        case 0x2c: /* frstor mem */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_frstor,
                               cpu_A0, tcg_const_i32(s->dflag));
            break;
        case 0x2e: /* fnsave mem */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fsave,
                               cpu_A0, tcg_const_i32(s->dflag));
            break;
        case 0x2f: /* fnstsw mem */
            tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
            break;
        case 0x3c: /* fbld */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
            break;
        case 0x3e: /* fbstp */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
            tcg_gen_helper_0_0(helper_fpop);
            break;
        case 0x3d: /* fildll */
            tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                              (s->mem_index >> 2) - 1);
            tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
            break;
        case 0x3f: /* fistpll */
            tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
            tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                              (s->mem_index >> 2) - 1);
            tcg_gen_helper_0_0(helper_fpop);
            break;
        default:
            goto illegal_op;
        }
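
/* Illustrative note: the fistt*_ST0 helpers used above implement the
   SSE3 fisttp family, which stores ST0 converted with truncation
   (round toward zero) regardless of the current FPU rounding mode,
   then pops.  A plain C cast shows the rounding rule (name here is
   hypothetical): */
static inline int32_t fisttl_model(double st0)
{
    return (int32_t)st0;   /* C float-to-int conversion truncates */
}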
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                    s->cc_op = CC_OP_DYNAMIC;
                }
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
                gen_eob(s);
                break;
            default:
                goto illegal_op;
            }
        } else { /* sidt */
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
            gen_add_A0_im(s, 2);
            tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
            if (!s->dflag)
                gen_op_andl_T0_im(0xffffff);
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
        }
        break;
    case 2: /* lgdt */
    case 3: /* lidt */
        if (mod == 3) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            switch(rm) {
            case 0: /* VMRUN */
                if (!(s->flags & HF_SVME_MASK) || !s->pe)
                    goto illegal_op;
                if (s->cpl != 0) {
                    gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                    break;
                }
                tcg_gen_helper_0_2(helper_vmrun,
                                   tcg_const_i32(s->aflag),
                                   tcg_const_i32(s->pc - pc_start));
                break;
            case 1: /* VMMCALL */
                if (!(s->flags & HF_SVME_MASK))
                    goto illegal_op;
                tcg_gen_helper_0_0(helper_vmmcall);
                break;
            case 2: /* VMLOAD */
                if (!(s->flags & HF_SVME_MASK) || !s->pe)
                    goto illegal_op;
                if (s->cpl != 0) {
                    gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                    break;
                }
                tcg_gen_helper_0_1(helper_vmload,
                                   tcg_const_i32(s->aflag));
                break;
            case 3: /* VMSAVE */
                if (!(s->flags & HF_SVME_MASK) || !s->pe)
                    goto illegal_op;
                if (s->cpl != 0) {
                    gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                    break;
                }
                tcg_gen_helper_0_1(helper_vmsave,
                                   tcg_const_i32(s->aflag));
                break;
            case 4: /* STGI */
                if ((!(s->flags & HF_SVME_MASK) &&
                     !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
                    !s->pe)
                    goto illegal_op;
                if (s->cpl != 0) {
                    gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                    break;
                }
                tcg_gen_helper_0_0(helper_stgi);
                break;
            case 5: /* CLGI */
                if (!(s->flags & HF_SVME_MASK) || !s->pe)
                    goto illegal_op;
                if (s->cpl != 0) {
                    gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                    break;
                }
                tcg_gen_helper_0_0(helper_clgi);
                break;
            case 6: /* SKINIT */
                if ((!(s->flags & HF_SVME_MASK) &&
                     !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
                    !s->pe)
                    goto illegal_op;
                tcg_gen_helper_0_0(helper_skinit);
                break;
            case 7: /* INVLPGA */
                if (!(s->flags & HF_SVME_MASK) || !s->pe)
                    goto illegal_op;
                if (s->cpl != 0) {
                    gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                    break;
                }
                tcg_gen_helper_0_1(helper_invlpga,
                                   tcg_const_i32(s->aflag));
                break;
            default:
                goto illegal_op;
            }
        } else if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start,
                op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T1_A0(OT_WORD + s->mem_index);
            gen_add_A0_im(s, 2);
            gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            if (!s->dflag)
                gen_op_andl_T0_im(0xffffff);
            if (op == 2) {
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
                tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
            } else {
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
                tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
            }
        }
        break;
    case 4: /* smsw */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
        tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
        break;
    case 6: /* lmsw */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)

/* flags read by an operation */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
/* flags written by an operation */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)

    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
/* simpler form of an operation if no flags need to be generated */
static uint16_t opc_simpler[NB_OPS] = {
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,

    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
void optimize_flags_init(void)
{
    int i;

#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
    cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");

    /* register helpers */
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"

    /* put default values in arrays */
    for(i = 0; i < NB_OPS; i++) {
        if (opc_simpler[i] == 0)
            opc_simpler[i] = i;
    }
}
/* CPU flags computation optimization: we move backward thru the
   generated code to see which flags are needed. The operation is
   modified if suitable */
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
{
    uint16_t *opc_ptr;
    int live_flags, write_flags, op;

    opc_ptr = opc_buf + opc_buf_len;
    /* live_flags contains the flags needed by the next instructions
       in the code. At the end of the block, we consider that all the
       flags are live. */
    live_flags = CC_OSZAPC;
    while (opc_ptr > opc_buf) {
        op = *--opc_ptr;
        /* if none of the flags written by the instruction is used,
           then we can try to find a simpler instruction */
        write_flags = opc_write_flags[op];
        if ((live_flags & write_flags) == 0) {
            *opc_ptr = opc_simpler[op];
        }
        /* compute the live flags before the instruction */
        live_flags &= ~write_flags;
        live_flags |= opc_read_flags[op];
    }
}
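
/* Standalone sketch of the same backward liveness scan on a toy opcode
   stream (illustrative only: the TOY_* names and tables are hypothetical
   stand-ins for the opc_read_flags/opc_write_flags/opc_simpler arrays).
   An op whose written flags are all dead is rewritten to its flag-less
   variant; ops that write no flags never block a rewrite upstream. */
enum { TOY_ADD_CC, TOY_ADD, TOY_JZ, TOY_MOV, TOY_NB };

static const uint16_t toy_read[TOY_NB]  = { [TOY_JZ] = CC_Z };
static const uint16_t toy_write[TOY_NB] = { [TOY_ADD_CC] = CC_OSZAPC };
static const uint16_t toy_simpler[TOY_NB] = {
    [TOY_ADD_CC] = TOY_ADD,
    [TOY_ADD] = TOY_ADD,
    [TOY_JZ] = TOY_JZ,
    [TOY_MOV] = TOY_MOV,
};

static void toy_optimize(uint16_t *ops, int n)
{
    int live = CC_OSZAPC;   /* all flags considered live at block end */

    while (n-- > 0) {
        int op = ops[n];
        if ((live & toy_write[op]) == 0)
            ops[n] = toy_simpler[op];
        live = (live & ~toy_write[op]) | toy_read[op];
    }
}
/* e.g. { TOY_ADD_CC, TOY_MOV, TOY_ADD_CC, TOY_JZ }: the first add's
   flags are overwritten before TOY_JZ reads CC_Z, so it becomes
   TOY_ADD, while the second add keeps its flag computation. */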
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline void gen_intermediate_code_internal(CPUState *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;