@@ -42 +42 @@
 #define ENABLE_ARCH_6K arm_feature(env, ARM_FEATURE_V6K)
 #define ENABLE_ARCH_6T2 arm_feature(env, ARM_FEATURE_THUMB2)
 #define ENABLE_ARCH_7 arm_feature(env, ARM_FEATURE_V7)
+#define ENABLE_ARCH_8 arm_feature(env, ARM_FEATURE_V8)
 #define ARCH(x) do { if (!ENABLE_ARCH_##x) goto illegal_op; } while(0)
@@ -98 +99 @@
 /* FIXME: These should be removed. */
-static TCGv cpu_F0s, cpu_F1s;
+static TCGv_i32 cpu_F0s, cpu_F1s;
 static TCGv_i64 cpu_F0d, cpu_F1d;
 #include "exec/gen-icount.h"
@@ -139 +140 @@
 #include "helper.h"
-static inline TCGv load_cpu_offset(int offset)
+static inline TCGv_i32 load_cpu_offset(int offset)
-    TCGv tmp = tcg_temp_new_i32();
+    TCGv_i32 tmp = tcg_temp_new_i32();
     tcg_gen_ld_i32(tmp, cpu_env, offset);
@@ -149 +150 @@
 #define load_cpu_field(name) load_cpu_offset(offsetof(CPUARMState, name))
-static inline void store_cpu_offset(TCGv var, int offset)
+static inline void store_cpu_offset(TCGv_i32 var, int offset)
     tcg_gen_st_i32(var, cpu_env, offset);
     tcg_temp_free_i32(var);
@@ -158 +159 @@
     store_cpu_offset(var, offsetof(CPUARMState, name))
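Aside: the load_cpu_field/store_cpu_field pair gives typed access to any CPUARMState member, and store_cpu_offset frees its argument, so the temporary's lifetime ends at the store. A hypothetical call site (sketch; the field names are illustrative only):

    TCGv_i32 tmp = load_cpu_field(spsr);    /* ld_i32 from env->spsr */
    store_cpu_field(tmp, banked_spsr[0]);   /* st_i32, then frees tmp */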
@@ -160 +161 @@
 /* Set a variable to the value of a CPU register. */
-static void load_reg_var(DisasContext *s, TCGv var, int reg)
+static void load_reg_var(DisasContext *s, TCGv_i32 var, int reg)
@@ -176 +177 @@
 /* Create a new temporary and set it to the value of a CPU register. */
-static inline TCGv load_reg(DisasContext *s, int reg)
+static inline TCGv_i32 load_reg(DisasContext *s, int reg)
-    TCGv tmp = tcg_temp_new_i32();
+    TCGv_i32 tmp = tcg_temp_new_i32();
     load_reg_var(s, tmp, reg);
@@ -184 +185 @@
 /* Set a CPU register. The source must be a temporary and will be
    marked as dead. */
-static void store_reg(DisasContext *s, int reg, TCGv var)
+static void store_reg(DisasContext *s, int reg, TCGv_i32 var)
     tcg_gen_andi_i32(var, var, ~1);
@@ -203 +204 @@
 #define gen_uxtb16(var) gen_helper_uxtb16(var, var)
-static inline void gen_set_cpsr(TCGv var, uint32_t mask)
+static inline void gen_set_cpsr(TCGv_i32 var, uint32_t mask)
-    TCGv tmp_mask = tcg_const_i32(mask);
+    TCGv_i32 tmp_mask = tcg_const_i32(mask);
     gen_helper_cpsr_write(cpu_env, var, tmp_mask);
     tcg_temp_free_i32(tmp_mask);
@@ -215 +216 @@
 static void gen_exception(int excp)
-    TCGv tmp = tcg_temp_new_i32();
+    TCGv_i32 tmp = tcg_temp_new_i32();
     tcg_gen_movi_i32(tmp, excp);
     gen_helper_exception(cpu_env, tmp);
     tcg_temp_free_i32(tmp);
@@ -223 +224 @@
-static void gen_smul_dual(TCGv a, TCGv b)
+static void gen_smul_dual(TCGv_i32 a, TCGv_i32 b)
-    TCGv tmp1 = tcg_temp_new_i32();
-    TCGv tmp2 = tcg_temp_new_i32();
+    TCGv_i32 tmp1 = tcg_temp_new_i32();
+    TCGv_i32 tmp2 = tcg_temp_new_i32();
     tcg_gen_ext16s_i32(tmp1, a);
     tcg_gen_ext16s_i32(tmp2, b);
     tcg_gen_mul_i32(tmp1, tmp1, tmp2);
@@ -238 +239 @@
 /* Byteswap each halfword. */
-static void gen_rev16(TCGv var)
+static void gen_rev16(TCGv_i32 var)
-    TCGv tmp = tcg_temp_new_i32();
+    TCGv_i32 tmp = tcg_temp_new_i32();
     tcg_gen_shri_i32(tmp, var, 8);
     tcg_gen_andi_i32(tmp, tmp, 0x00ff00ff);
     tcg_gen_shli_i32(var, var, 8);
@@ -258 +259 @@
 /* Unsigned bitfield extract. */
-static void gen_ubfx(TCGv var, int shift, uint32_t mask)
+static void gen_ubfx(TCGv_i32 var, int shift, uint32_t mask)
     tcg_gen_shri_i32(var, var, shift);
@@ -309 +310 @@
 /* 32x32->64 multiply. Marks inputs as dead. */
-static TCGv_i64 gen_mulu_i64_i32(TCGv a, TCGv b)
+static TCGv_i64 gen_mulu_i64_i32(TCGv_i32 a, TCGv_i32 b)
-    TCGv lo = tcg_temp_new_i32();
-    TCGv hi = tcg_temp_new_i32();
+    TCGv_i32 lo = tcg_temp_new_i32();
+    TCGv_i32 hi = tcg_temp_new_i32();
     tcg_gen_mulu2_i32(lo, hi, a, b);
     ret = tcg_temp_new_i64();
     tcg_gen_concat_i32_i64(ret, lo, hi);
+    tcg_temp_free_i32(lo);
+    tcg_temp_free_i32(hi);
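The mulu2/concat pairing above is the whole trick: one TCG op yields both 32-bit halves of the product, and the concat builds the 64-bit value without shifts. What it computes, in plain C (sketch):

    /* gen_mulu_i64_i32 computes this, one TCG op per step: */
    static uint64_t mulu_i64_i32(uint32_t a, uint32_t b)
    {
        uint64_t prod = (uint64_t)a * b;  /* lo = (uint32_t)prod, hi = prod >> 32 */
        return prod;                      /* concat_i32_i64(lo, hi) */
    }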
@@ -328 +329 @@
-static TCGv_i64 gen_muls_i64_i32(TCGv a, TCGv b)
+static TCGv_i64 gen_muls_i64_i32(TCGv_i32 a, TCGv_i32 b)
-    TCGv lo = tcg_temp_new_i32();
-    TCGv hi = tcg_temp_new_i32();
+    TCGv_i32 lo = tcg_temp_new_i32();
+    TCGv_i32 hi = tcg_temp_new_i32();
     tcg_gen_muls2_i32(lo, hi, a, b);
     ret = tcg_temp_new_i64();
     tcg_gen_concat_i32_i64(ret, lo, hi);
+    tcg_temp_free_i32(lo);
+    tcg_temp_free_i32(hi);
@@ -346 +347 @@
 /* Swap low and high halfwords. */
-static void gen_swap_half(TCGv var)
+static void gen_swap_half(TCGv_i32 var)
-    TCGv tmp = tcg_temp_new_i32();
+    TCGv_i32 tmp = tcg_temp_new_i32();
     tcg_gen_shri_i32(tmp, var, 16);
     tcg_gen_shli_i32(var, var, 16);
     tcg_gen_or_i32(var, var, tmp);
@@ -360 +361 @@
     t0 = (t0 + t1) ^ tmp;
@@ -363 +364 @@
-static void gen_add16(TCGv t0, TCGv t1)
+static void gen_add16(TCGv_i32 t0, TCGv_i32 t1)
-    TCGv tmp = tcg_temp_new_i32();
+    TCGv_i32 tmp = tcg_temp_new_i32();
     tcg_gen_xor_i32(tmp, t0, t1);
     tcg_gen_andi_i32(tmp, tmp, 0x8000);
     tcg_gen_andi_i32(t0, t0, ~0x8000);
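gen_add16 performs two independent 16-bit adds inside one 32-bit value. The xor/mask sequence captures the XOR of the two bit-15s, clears bit 15 of both inputs so no carry can cross the halfword boundary, and re-inserts the captured bit after the add (the comment line "t0 = (t0 + t1) ^ tmp;" above is that last step). The same identity in plain C (sketch):

    static uint32_t add16_sketch(uint32_t t0, uint32_t t1)
    {
        uint32_t tmp = (t0 ^ t1) & 0x8000;  /* XOR of the two bit-15s */
        t0 &= ~0x8000;
        t1 &= ~0x8000;
        return (t0 + t1) ^ tmp;             /* no carry can cross bit 15 now */
    }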
@@ -376 +377 @@
 /* Set CF to the top bit of var. */
-static void gen_set_CF_bit31(TCGv var)
+static void gen_set_CF_bit31(TCGv_i32 var)
     tcg_gen_shri_i32(cpu_CF, var, 31);
@@ -382 +383 @@
 /* Set N and Z flags from var. */
-static inline void gen_logic_CC(TCGv var)
+static inline void gen_logic_CC(TCGv_i32 var)
     tcg_gen_mov_i32(cpu_NF, var);
     tcg_gen_mov_i32(cpu_ZF, var);
@@ -389 +390 @@
 /* T0 += T1 + CF. */
-static void gen_adc(TCGv t0, TCGv t1)
+static void gen_adc(TCGv_i32 t0, TCGv_i32 t1)
     tcg_gen_add_i32(t0, t0, t1);
     tcg_gen_add_i32(t0, t0, cpu_CF);
@@ -396 +397 @@
 /* dest = T0 + T1 + CF. */
-static void gen_add_carry(TCGv dest, TCGv t0, TCGv t1)
+static void gen_add_carry(TCGv_i32 dest, TCGv_i32 t0, TCGv_i32 t1)
     tcg_gen_add_i32(dest, t0, t1);
     tcg_gen_add_i32(dest, dest, cpu_CF);
@@ -403 +404 @@
 /* dest = T0 - T1 + CF - 1. */
-static void gen_sub_carry(TCGv dest, TCGv t0, TCGv t1)
+static void gen_sub_carry(TCGv_i32 dest, TCGv_i32 t0, TCGv_i32 t1)
     tcg_gen_sub_i32(dest, t0, t1);
     tcg_gen_add_i32(dest, dest, cpu_CF);
@@ -411 +412 @@
 /* dest = T0 + T1. Compute C, N, V and Z flags */
-static void gen_add_CC(TCGv dest, TCGv t0, TCGv t1)
+static void gen_add_CC(TCGv_i32 dest, TCGv_i32 t0, TCGv_i32 t1)
-    TCGv tmp = tcg_temp_new_i32();
+    TCGv_i32 tmp = tcg_temp_new_i32();
     tcg_gen_movi_i32(tmp, 0);
     tcg_gen_add2_i32(cpu_NF, cpu_CF, t0, tmp, t1, tmp);
     tcg_gen_mov_i32(cpu_ZF, cpu_NF);
@@ -425 +426 @@
 /* dest = T0 + T1 + CF. Compute C, N, V and Z flags */
-static void gen_adc_CC(TCGv dest, TCGv t0, TCGv t1)
+static void gen_adc_CC(TCGv_i32 dest, TCGv_i32 t0, TCGv_i32 t1)
-    TCGv tmp = tcg_temp_new_i32();
+    TCGv_i32 tmp = tcg_temp_new_i32();
     if (TCG_TARGET_HAS_add2_i32) {
         tcg_gen_movi_i32(tmp, 0);
         tcg_gen_add2_i32(cpu_NF, cpu_CF, t0, tmp, cpu_CF, tmp);
@@ -453 +454 @@
 /* dest = T0 - T1. Compute C, N, V and Z flags */
-static void gen_sub_CC(TCGv dest, TCGv t0, TCGv t1)
+static void gen_sub_CC(TCGv_i32 dest, TCGv_i32 t0, TCGv_i32 t1)
     tcg_gen_sub_i32(cpu_NF, t0, t1);
     tcg_gen_mov_i32(cpu_ZF, cpu_NF);
     tcg_gen_setcond_i32(TCG_COND_GEU, cpu_CF, t0, t1);
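gen_sub_CC derives the flags without a wide subtract: on ARM, C after SUB/CMP means "no borrow", which is exactly the unsigned comparison t0 >= t1, hence the TCG_COND_GEU setcond. In plain C (sketch):

    static uint32_t sub_CC_sketch(uint32_t t0, uint32_t t1, uint32_t *cf)
    {
        uint32_t res = t0 - t1;
        *cf = (t0 >= t1);   /* ARM C flag for subtraction: no borrow */
        return res;         /* NF is the sign of res, ZF tests res == 0 */
    }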
@@ -468 +469 @@
 /* dest = T0 + ~T1 + CF. Compute C, N, V and Z flags */
-static void gen_sbc_CC(TCGv dest, TCGv t0, TCGv t1)
+static void gen_sbc_CC(TCGv_i32 dest, TCGv_i32 t0, TCGv_i32 t1)
-    TCGv tmp = tcg_temp_new_i32();
+    TCGv_i32 tmp = tcg_temp_new_i32();
     tcg_gen_not_i32(tmp, t1);
     gen_adc_CC(dest, t0, tmp);
+    tcg_temp_free_i32(tmp);
@@ -477 +478 @@
 #define GEN_SHIFT(name) \
-static void gen_##name(TCGv dest, TCGv t0, TCGv t1) \
+static void gen_##name(TCGv_i32 dest, TCGv_i32 t0, TCGv_i32 t1) \
-    TCGv tmp1, tmp2, tmp3; \
+    TCGv_i32 tmp1, tmp2, tmp3; \
     tmp1 = tcg_temp_new_i32(); \
     tcg_gen_andi_i32(tmp1, t1, 0xff); \
     tmp2 = tcg_const_i32(0); \
@@ -496 +497 @@
-static void gen_sar(TCGv dest, TCGv t0, TCGv t1)
+static void gen_sar(TCGv_i32 dest, TCGv_i32 t0, TCGv_i32 t1)
     tmp1 = tcg_temp_new_i32();
     tcg_gen_andi_i32(tmp1, t1, 0xff);
     tmp2 = tcg_const_i32(0x1f);
     tcg_temp_free_i32(tmp1);
@@ -508 +509 @@
-static void tcg_gen_abs_i32(TCGv dest, TCGv src)
+static void tcg_gen_abs_i32(TCGv_i32 dest, TCGv_i32 src)
-    TCGv c0 = tcg_const_i32(0);
-    TCGv tmp = tcg_temp_new_i32();
+    TCGv_i32 c0 = tcg_const_i32(0);
+    TCGv_i32 tmp = tcg_temp_new_i32();
     tcg_gen_neg_i32(tmp, src);
     tcg_gen_movcond_i32(TCG_COND_GT, dest, src, c0, src, tmp);
     tcg_temp_free_i32(c0);
     tcg_temp_free_i32(tmp);
@@ -518 +519 @@
-static void shifter_out_im(TCGv var, int shift)
+static void shifter_out_im(TCGv_i32 var, int shift)
     if (shift == 0) {
         tcg_gen_andi_i32(cpu_CF, var, 1);
@@ -565 +567 @@
         shifter_out_im(var, shift - 1);
         tcg_gen_rotri_i32(var, var, shift); break;
-        TCGv tmp = tcg_temp_new_i32();
+        TCGv_i32 tmp = tcg_temp_new_i32();
         tcg_gen_shli_i32(tmp, cpu_CF, 31);
         shifter_out_im(var, 0);
@@ -613 +615 @@
         case 4: gen_pas_helper(glue(pfx,add8)); break; \
         case 7: gen_pas_helper(glue(pfx,sub8)); break; \
-static void gen_arm_parallel_addsub(int op1, int op2, TCGv a, TCGv b)
+static void gen_arm_parallel_addsub(int op1, int op2, TCGv_i32 a, TCGv_i32 b)
@@ -660 +662 @@
         case 5: gen_pas_helper(glue(pfx,sub16)); break; \
         case 6: gen_pas_helper(glue(pfx,subaddx)); break; \
-static void gen_thumb2_parallel_addsub(int op1, int op2, TCGv a, TCGv b)
+static void gen_thumb2_parallel_addsub(int op1, int op2, TCGv_i32 a, TCGv_i32 b)
@@ -808 +810 @@
 /* Set PC and Thumb state from var. var is marked as dead. */
-static inline void gen_bx(DisasContext *s, TCGv var)
+static inline void gen_bx(DisasContext *s, TCGv_i32 var)
     s->is_jmp = DISAS_UPDATE;
     tcg_gen_andi_i32(cpu_R[15], var, ~1);
@@ -847 +849 @@
     s->is_jmp = DISAS_SMC;
@@ -850 +851 @@
-static inline TCGv gen_ld8s(TCGv addr, int index)
-    TCGv tmp = tcg_temp_new_i32();
-    tcg_gen_qemu_ld8s(tmp, addr, index);
-static inline TCGv gen_ld8u(TCGv addr, int index)
-    TCGv tmp = tcg_temp_new_i32();
-    tcg_gen_qemu_ld8u(tmp, addr, index);
-static inline TCGv gen_ld16s(TCGv addr, int index)
-    TCGv tmp = tcg_temp_new_i32();
-    tcg_gen_qemu_ld16s(tmp, addr, index);
-static inline TCGv gen_ld16u(TCGv addr, int index)
-    TCGv tmp = tcg_temp_new_i32();
-    tcg_gen_qemu_ld16u(tmp, addr, index);
-static inline TCGv gen_ld32(TCGv addr, int index)
-    TCGv tmp = tcg_temp_new_i32();
-    tcg_gen_qemu_ld32u(tmp, addr, index);
-static inline TCGv_i64 gen_ld64(TCGv addr, int index)
-    TCGv_i64 tmp = tcg_temp_new_i64();
-    tcg_gen_qemu_ld64(tmp, addr, index);
-static inline void gen_st8(TCGv val, TCGv addr, int index)
-    tcg_gen_qemu_st8(val, addr, index);
-    tcg_temp_free_i32(val);
-static inline void gen_st16(TCGv val, TCGv addr, int index)
-    tcg_gen_qemu_st16(val, addr, index);
-    tcg_temp_free_i32(val);
-static inline void gen_st32(TCGv val, TCGv addr, int index)
-    tcg_gen_qemu_st32(val, addr, index);
-    tcg_temp_free_i32(val);
-static inline void gen_st64(TCGv_i64 val, TCGv addr, int index)
-    tcg_gen_qemu_st64(val, addr, index);
-    tcg_temp_free_i64(val);
@@ -907 +852 @@
 static inline void gen_set_pc_im(uint32_t val)
     tcg_gen_movi_i32(cpu_R[15], val);
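All of the deleted wrappers follow one of two shapes, so every later hunk in this patch is the same mechanical rewrite at a call site: loads gain an explicit temp allocation, stores gain an explicit free (the old gen_stN freed its value argument internally). The pattern (a sketch using a generic call site, not any specific hunk):

    /* was: tmp = gen_ld32(addr, index); */
    tmp = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, addr, index);

    /* was: gen_st32(tmp, addr, index); */
    tcg_gen_qemu_st32(tmp, addr, index);
    tcg_temp_free_i32(tmp);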
@@ -1111 +1056 @@
 #define VFP_GEN_FIX(name) \
 static inline void gen_vfp_##name(int dp, int shift, int neon) \
-    TCGv tmp_shift = tcg_const_i32(shift); \
+    TCGv_i32 tmp_shift = tcg_const_i32(shift); \
     TCGv_ptr statusptr = get_fpstatus_ptr(neon); \
     gen_helper_vfp_##name##d(cpu_F0d, cpu_F0d, tmp_shift, statusptr); \
@@ -1139 +1084 @@
     tcg_gen_qemu_ld32u(cpu_F0s, addr, IS_USER(s));
-static inline void gen_vfp_st(DisasContext *s, int dp, TCGv addr)
+static inline void gen_vfp_st(DisasContext *s, int dp, TCGv_i32 addr)
     tcg_gen_qemu_st64(cpu_F0d, addr, IS_USER(s));
@@ -1171 +1116 @@
     return vfp_reg_offset(0, sreg);
-static TCGv neon_load_reg(int reg, int pass)
+static TCGv_i32 neon_load_reg(int reg, int pass)
-    TCGv tmp = tcg_temp_new_i32();
+    TCGv_i32 tmp = tcg_temp_new_i32();
     tcg_gen_ld_i32(tmp, cpu_env, neon_reg_offset(reg, pass));
-static void neon_store_reg(int reg, int pass, TCGv var)
+static void neon_store_reg(int reg, int pass, TCGv_i32 var)
     tcg_gen_st_i32(var, cpu_env, neon_reg_offset(reg, pass));
     tcg_temp_free_i32(var);
@@ -1235 +1180 @@
     tcg_gen_st_i64(var, cpu_env, offsetof(CPUARMState, iwmmxt.regs[reg]));
-static inline TCGv iwmmxt_load_creg(int reg)
+static inline TCGv_i32 iwmmxt_load_creg(int reg)
-    TCGv var = tcg_temp_new_i32();
+    TCGv_i32 var = tcg_temp_new_i32();
     tcg_gen_ld_i32(var, cpu_env, offsetof(CPUARMState, iwmmxt.cregs[reg]));
-static inline void iwmmxt_store_creg(int reg, TCGv var)
+static inline void iwmmxt_store_creg(int reg, TCGv_i32 var)
     tcg_gen_st_i32(var, cpu_env, offsetof(CPUARMState, iwmmxt.cregs[reg]));
     tcg_temp_free_i32(var);
@@ -1425 +1371 @@
-static inline int gen_iwmmxt_shift(uint32_t insn, uint32_t mask, TCGv dest)
+static inline int gen_iwmmxt_shift(uint32_t insn, uint32_t mask, TCGv_i32 dest)
     int rd = (insn >> 0) & 0xf;
     if (insn & (1 << 8)) {
         if (rd < ARM_IWMMXT_wCGR0 || rd > ARM_IWMMXT_wCGR3) {
@@ -1452 +1398 @@
     int rdhi, rdlo, rd0, rd1, i;
-    TCGv tmp, tmp2, tmp3;
+    TCGv_i32 tmp, tmp2, tmp3;
     if ((insn & 0x0e000e00) == 0x0c000000) {
         if ((insn & 0x0fe00ff0) == 0x0c400000) {
@@ -1489 +1435 @@
             tcg_gen_qemu_ld64(cpu_M0, addr, IS_USER(s));
         } else { /* WLDRW wRd */
-            tmp = gen_ld32(addr, IS_USER(s));
+            tmp = tcg_temp_new_i32();
+            tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
+        tmp = tcg_temp_new_i32();
         if (insn & (1 << 22)) { /* WLDRH */
-            tmp = gen_ld16u(addr, IS_USER(s));
+            tcg_gen_qemu_ld16u(tmp, addr, IS_USER(s));
         } else { /* WLDRB */
-            tmp = gen_ld8u(addr, IS_USER(s));
+            tcg_gen_qemu_ld8u(tmp, addr, IS_USER(s));
@@ -1508 +1456 @@
         if ((insn >> 28) == 0xf) { /* WSTRW wCx */
             tmp = iwmmxt_load_creg(wrd);
-            gen_st32(tmp, addr, IS_USER(s));
+            tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
             gen_op_iwmmxt_movq_M0_wRn(wrd);
             tmp = tcg_temp_new_i32();
             if (insn & (1 << 8)) {
                 if (insn & (1 << 22)) { /* WSTRD */
-                    tcg_temp_free_i32(tmp);
                     tcg_gen_qemu_st64(cpu_M0, addr, IS_USER(s));
                 } else { /* WSTRW wRd */
                     tcg_gen_trunc_i64_i32(tmp, cpu_M0);
-                    gen_st32(tmp, addr, IS_USER(s));
+                    tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
                 if (insn & (1 << 22)) { /* WSTRH */
                     tcg_gen_trunc_i64_i32(tmp, cpu_M0);
-                    gen_st16(tmp, addr, IS_USER(s));
+                    tcg_gen_qemu_st16(tmp, addr, IS_USER(s));
                 } else { /* WSTRB */
                     tcg_gen_trunc_i64_i32(tmp, cpu_M0);
-                    gen_st8(tmp, addr, IS_USER(s));
+                    tcg_gen_qemu_st8(tmp, addr, IS_USER(s));
+            tcg_temp_free_i32(tmp);
@@ -1533 +1481 @@
     tcg_temp_free_i32(addr);
@@ -2453 +2401 @@
 static int disas_dsp_insn(CPUARMState *env, DisasContext *s, uint32_t insn)
     int acc, rd0, rd1, rdhi, rdlo;
     if ((insn & 0x0ff00f10) == 0x0e200010) {
         /* Multiply with Internal Accumulate Format */
@@ -2539 +2487 @@
 #define VFP_DREG_M(reg, insn) VFP_DREG(reg, insn, 0, 5)
 /* Move between integer and VFP cores. */
-static TCGv gen_vfp_mrs(void)
+static TCGv_i32 gen_vfp_mrs(void)
-    TCGv tmp = tcg_temp_new_i32();
+    TCGv_i32 tmp = tcg_temp_new_i32();
     tcg_gen_mov_i32(tmp, cpu_F0s);
-static void gen_vfp_msr(TCGv tmp)
+static void gen_vfp_msr(TCGv_i32 tmp)
     tcg_gen_mov_i32(cpu_F0s, tmp);
     tcg_temp_free_i32(tmp);
@@ -2555 +2503 @@
-static void gen_neon_dup_u8(TCGv var, int shift)
+static void gen_neon_dup_u8(TCGv_i32 var, int shift)
-    TCGv tmp = tcg_temp_new_i32();
+    TCGv_i32 tmp = tcg_temp_new_i32();
     tcg_gen_shri_i32(var, var, shift);
     tcg_gen_ext8u_i32(var, var);
     tcg_temp_free_i32(tmp);
@@ -2568 +2516 @@
-static void gen_neon_dup_low16(TCGv var)
+static void gen_neon_dup_low16(TCGv_i32 var)
-    TCGv tmp = tcg_temp_new_i32();
+    TCGv_i32 tmp = tcg_temp_new_i32();
     tcg_gen_ext16u_i32(var, var);
     tcg_gen_shli_i32(tmp, var, 16);
     tcg_gen_or_i32(var, var, tmp);
     tcg_temp_free_i32(tmp);
@@ -2577 +2525 @@
-static void gen_neon_dup_high16(TCGv var)
+static void gen_neon_dup_high16(TCGv_i32 var)
-    TCGv tmp = tcg_temp_new_i32();
+    TCGv_i32 tmp = tcg_temp_new_i32();
     tcg_gen_andi_i32(var, var, 0xffff0000);
     tcg_gen_shri_i32(tmp, var, 16);
     tcg_gen_or_i32(var, var, tmp);
     tcg_temp_free_i32(tmp);
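The two dup helpers broadcast one halfword across a 32-bit value using only shift/mask/or, matching the TCG ops above. In plain C (sketch):

    static uint32_t dup_low16(uint32_t x)  { x &= 0xffff;     return x | (x << 16); }
    static uint32_t dup_high16(uint32_t x) { x &= 0xffff0000; return x | (x >> 16); }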
@@ -2586 +2534 @@
-static TCGv gen_load_and_replicate(DisasContext *s, TCGv addr, int size)
+static TCGv_i32 gen_load_and_replicate(DisasContext *s, TCGv_i32 addr, int size)
     /* Load a single Neon element and replicate into a 32 bit TCG reg */
+    TCGv_i32 tmp = tcg_temp_new_i32();
     switch (size) {
-        tmp = gen_ld8u(addr, IS_USER(s));
+        tcg_gen_qemu_ld8u(tmp, addr, IS_USER(s));
         gen_neon_dup_u8(tmp, 0);
-        tmp = gen_ld16u(addr, IS_USER(s));
+        tcg_gen_qemu_ld16u(tmp, addr, IS_USER(s));
         gen_neon_dup_low16(tmp);
-        tmp = gen_ld32(addr, IS_USER(s));
+        tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
     default: /* Avoid compiler warnings. */
@@ -2612 +2560 @@
     uint32_t rd, rn, rm, op, i, n, offset, delta_d, delta_m, bank_mask;
     int dp, veclen;
     if (!arm_feature(env, ARM_FEATURE_VFP))
@@ -3438 +3386 @@
-static inline void gen_mulxy(TCGv t0, TCGv t1, int x, int y)
+static inline void gen_mulxy(TCGv_i32 t0, TCGv_i32 t1, int x, int y)
     tcg_gen_sari_i32(t0, t0, 16);
@@ -3484 +3432 @@
 /* Returns nonzero if access to the PSR is not permitted. Marks t0 as dead. */
-static int gen_set_psr(DisasContext *s, uint32_t mask, int spsr, TCGv t0)
+static int gen_set_psr(DisasContext *s, uint32_t mask, int spsr, TCGv_i32 t0)
     /* ??? This is also undefined in system mode. */
     if (IS_USER(s))
@@ -3506 +3454 @@
 /* Returns nonzero if access to the PSR is not permitted. */
 static int gen_set_psr_im(DisasContext *s, uint32_t mask, int spsr, uint32_t val)
     tmp = tcg_temp_new_i32();
     tcg_gen_movi_i32(tmp, val);
     return gen_set_psr(s, mask, spsr, tmp);
@@ -3515 +3463 @@
 /* Generate an old-style exception return. Marks pc as dead. */
-static void gen_exception_return(DisasContext *s, TCGv pc)
+static void gen_exception_return(DisasContext *s, TCGv_i32 pc)
     store_reg(s, 15, pc);
     tmp = load_cpu_field(spsr);
     gen_set_cpsr(tmp, 0xffffffff);
@@ -3526 +3474 @@
 /* Generate a v6 exception return. Marks both values as dead. */
-static void gen_rfe(DisasContext *s, TCGv pc, TCGv cpsr)
+static void gen_rfe(DisasContext *s, TCGv_i32 pc, TCGv_i32 cpsr)
     gen_set_cpsr(cpsr, 0xffffffff);
     tcg_temp_free_i32(cpsr);
@@ -3569 +3518 @@
 #define CPU_V001 cpu_V0, cpu_V0, cpu_V1
-static inline void gen_neon_add(int size, TCGv t0, TCGv t1)
+static inline void gen_neon_add(int size, TCGv_i32 t0, TCGv_i32 t1)
     switch (size) {
     case 0: gen_helper_neon_add_u8(t0, t0, t1); break;
@@ -3640 +3589 @@
     default: return 1; \
-static TCGv neon_load_scratch(int scratch)
+static TCGv_i32 neon_load_scratch(int scratch)
-    TCGv tmp = tcg_temp_new_i32();
+    TCGv_i32 tmp = tcg_temp_new_i32();
     tcg_gen_ld_i32(tmp, cpu_env, offsetof(CPUARMState, vfp.scratch[scratch]));
-static void neon_store_scratch(int scratch, TCGv var)
+static void neon_store_scratch(int scratch, TCGv_i32 var)
     tcg_gen_st_i32(var, cpu_env, offsetof(CPUARMState, vfp.scratch[scratch]));
     tcg_temp_free_i32(var);
-static inline TCGv neon_get_scalar(int size, int reg)
+static inline TCGv_i32 neon_get_scalar(int size, int reg)
     if (size == 1) {
         tmp = neon_load_reg(reg & 7, reg >> 4);
@@ -3872 +3821 @@
     tcg_gen_addi_i32(addr, addr, 1 << size);
     if (size == 3) {
+        tmp64 = tcg_temp_new_i64();
-        tmp64 = gen_ld64(addr, IS_USER(s));
+        tcg_gen_qemu_ld64(tmp64, addr, IS_USER(s));
         neon_store_reg64(tmp64, rd);
         tcg_temp_free_i64(tmp64);
         tmp64 = tcg_temp_new_i64();
         neon_load_reg64(tmp64, rd);
-        gen_st64(tmp64, addr, IS_USER(s));
+        tcg_gen_qemu_st64(tmp64, addr, IS_USER(s));
+        tcg_temp_free_i64(tmp64);
         tcg_gen_addi_i32(addr, addr, stride);
@@ -3886 +3835 @@
     for (pass = 0; pass < 2; pass++) {
         if (size == 2) {
-            tmp = gen_ld32(addr, IS_USER(s));
+            tmp = tcg_temp_new_i32();
+            tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
             neon_store_reg(rd, pass, tmp);
             tmp = neon_load_reg(rd, pass);
-            gen_st32(tmp, addr, IS_USER(s));
+            tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
+            tcg_temp_free_i32(tmp);
             tcg_gen_addi_i32(addr, addr, stride);
         } else if (size == 1) {
-            tmp = gen_ld16u(addr, IS_USER(s));
+            tmp = tcg_temp_new_i32();
+            tcg_gen_qemu_ld16u(tmp, addr, IS_USER(s));
             tcg_gen_addi_i32(addr, addr, stride);
-            tmp2 = gen_ld16u(addr, IS_USER(s));
+            tmp2 = tcg_temp_new_i32();
+            tcg_gen_qemu_ld16u(tmp2, addr, IS_USER(s));
             tcg_gen_addi_i32(addr, addr, stride);
             tcg_gen_shli_i32(tmp2, tmp2, 16);
             tcg_gen_or_i32(tmp, tmp, tmp2);
             tmp = neon_load_reg(rd, pass);
             tmp2 = tcg_temp_new_i32();
             tcg_gen_shri_i32(tmp2, tmp, 16);
-            gen_st16(tmp, addr, IS_USER(s));
+            tcg_gen_qemu_st16(tmp, addr, IS_USER(s));
+            tcg_temp_free_i32(tmp);
             tcg_gen_addi_i32(addr, addr, stride);
-            gen_st16(tmp2, addr, IS_USER(s));
+            tcg_gen_qemu_st16(tmp2, addr, IS_USER(s));
+            tcg_temp_free_i32(tmp2);
             tcg_gen_addi_i32(addr, addr, stride);
         } else /* size == 0 */ {
+            TCGV_UNUSED_I32(tmp2);
             for (n = 0; n < 4; n++) {
-                tmp = gen_ld8u(addr, IS_USER(s));
+                tmp = tcg_temp_new_i32();
+                tcg_gen_qemu_ld8u(tmp, addr, IS_USER(s));
                 tcg_gen_addi_i32(addr, addr, stride);
                 tcg_gen_shri_i32(tmp, tmp2, n * 8);
-                gen_st8(tmp, addr, IS_USER(s));
+                tcg_gen_qemu_st8(tmp, addr, IS_USER(s));
+                tcg_temp_free_i32(tmp);
                 tcg_gen_addi_i32(addr, addr, stride);
             tcg_temp_free_i32(tmp2);
@@ -4056 +4013 @@
     load_reg_var(s, addr, rn);
     for (reg = 0; reg < nregs; reg++) {
+        tmp = tcg_temp_new_i32();
         switch (size) {
-            tmp = gen_ld8u(addr, IS_USER(s));
+            tcg_gen_qemu_ld8u(tmp, addr, IS_USER(s));
-            tmp = gen_ld16u(addr, IS_USER(s));
+            tcg_gen_qemu_ld16u(tmp, addr, IS_USER(s));
-            tmp = gen_ld32(addr, IS_USER(s));
+            tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
         default: /* Avoid compiler warnings. */
@@ -4082 +4040 @@
         tcg_gen_shri_i32(tmp, tmp, shift);
         switch (size) {
-            gen_st8(tmp, addr, IS_USER(s));
+            tcg_gen_qemu_st8(tmp, addr, IS_USER(s));
-            gen_st16(tmp, addr, IS_USER(s));
+            tcg_gen_qemu_st16(tmp, addr, IS_USER(s));
-            gen_st32(tmp, addr, IS_USER(s));
+            tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
+        tcg_temp_free_i32(tmp);
         tcg_gen_addi_i32(addr, addr, 1 << size);
@@ -4119 +4078 @@
 /* Bitwise select. dest = c ? t : f. Clobbers T and F. */
-static void gen_neon_bsl(TCGv dest, TCGv t, TCGv f, TCGv c)
+static void gen_neon_bsl(TCGv_i32 dest, TCGv_i32 t, TCGv_i32 f, TCGv_i32 c)
     tcg_gen_and_i32(t, t, c);
     tcg_gen_andc_i32(f, f, c);
     tcg_gen_or_i32(dest, t, f);
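gen_neon_bsl implements VBSL with three bitwise ops, which is why one helper serves every element size: selection happens per bit, so element boundaries never matter. The identity in plain C (sketch):

    static uint32_t bsl_sketch(uint32_t c, uint32_t t, uint32_t f)
    {
        return (t & c) | (f & ~c);   /* per-bit: c ? t : f */
    }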
@@ -4127 +4086 @@
-static inline void gen_neon_narrow(int size, TCGv dest, TCGv_i64 src)
+static inline void gen_neon_narrow(int size, TCGv_i32 dest, TCGv_i64 src)
     switch (size) {
     case 0: gen_helper_neon_narrow_u8(dest, src); break;
@@ -4292 +4252 @@
-static void gen_neon_narrow_op(int op, int u, int size, TCGv dest, TCGv_i64 src)
+static void gen_neon_narrow_op(int op, int u, int size,
+                               TCGv_i32 dest, TCGv_i64 src)
@@ -5457 +5418 @@
     tmp = neon_load_reg(rn, 1);
     neon_store_scratch(2, tmp);
+    TCGV_UNUSED_I32(tmp3);
     for (pass = 0; pass < 2; pass++) {
         if (src1_wide) {
             neon_load_reg64(cpu_V0, rn + pass);
+            TCGV_UNUSED_I32(tmp);
         if (pass == 1 && rd == rn) {
             tmp = neon_load_scratch(2);
@@ -6082 +6043 @@
         case NEON_2RM_VNEG:
             tmp2 = tcg_const_i32(0);
             gen_neon_rsb(size, tmp, tmp2);
-            tcg_temp_free(tmp2);
+            tcg_temp_free_i32(tmp2);
         case NEON_2RM_VCGT0_F:
             TCGv_ptr fpstatus = get_fpstatus_ptr(1);
             tmp2 = tcg_const_i32(0);
             gen_helper_neon_cgt_f32(tmp, tmp, tmp2, fpstatus);
-            tcg_temp_free(tmp2);
+            tcg_temp_free_i32(tmp2);
             tcg_temp_free_ptr(fpstatus);
@@ -6411 +6376 @@
             tcg_temp_free_ptr(tmpptr);
             tcg_temp_free_i32(tmp);
-            TCGv tmp = load_reg(s, rt);
+            TCGv_i32 tmp = load_reg(s, rt);
             store_cpu_offset(tmp, ri->fieldoffset);
+    if (use_icount && (ri->type & ARM_CP_IO)) {
+        /* I/O operations must end the TB here (whether read or write) */
+    } else if (!isread && !(ri->type & ARM_CP_SUPPRESS_TB_END)) {
         /* We default to ending the TB on a coprocessor register write,
          * but allow this to be suppressed by the register definition
          * (usually only necessary to work around guest bugs).
-    if (!(ri->type & ARM_CP_SUPPRESS_TB_END)) {
@@ -6479 +6450 @@
 /* Set N and Z flags from hi|lo. */
-static void gen_logicq_cc(TCGv lo, TCGv hi)
+static void gen_logicq_cc(TCGv_i32 lo, TCGv_i32 hi)
     tcg_gen_mov_i32(cpu_NF, hi);
     tcg_gen_or_i32(cpu_ZF, lo, hi);
@@ -6493 +6464 @@
    this sequence is effectively atomic. In user emulation mode we
    throw an exception and handle the atomic operation elsewhere. */
 static void gen_load_exclusive(DisasContext *s, int rt, int rt2,
-                               TCGv addr, int size)
+                               TCGv_i32 addr, int size)
+    TCGv_i32 tmp = tcg_temp_new_i32();
     switch (size) {
-        tmp = gen_ld8u(addr, IS_USER(s));
+        tcg_gen_qemu_ld8u(tmp, addr, IS_USER(s));
-        tmp = gen_ld16u(addr, IS_USER(s));
+        tcg_gen_qemu_ld16u(tmp, addr, IS_USER(s));
-        tmp = gen_ld32(addr, IS_USER(s));
+        tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
     tcg_gen_mov_i32(cpu_exclusive_val, tmp);
     store_reg(s, rt, tmp);
     if (size == 3) {
-        TCGv tmp2 = tcg_temp_new_i32();
+        TCGv_i32 tmp2 = tcg_temp_new_i32();
         tcg_gen_addi_i32(tmp2, addr, 4);
-        tmp = gen_ld32(tmp2, IS_USER(s));
+        tmp = tcg_temp_new_i32();
+        tcg_gen_qemu_ld32u(tmp, tmp2, IS_USER(s));
         tcg_temp_free_i32(tmp2);
         tcg_gen_mov_i32(cpu_exclusive_high, tmp);
         store_reg(s, rt2, tmp);
@@ -6555 +6527 @@
     fail_label = gen_new_label();
     done_label = gen_new_label();
     tcg_gen_brcond_i32(TCG_COND_NE, addr, cpu_exclusive_addr, fail_label);
+    tmp = tcg_temp_new_i32();
     switch (size) {
-        tmp = gen_ld8u(addr, IS_USER(s));
+        tcg_gen_qemu_ld8u(tmp, addr, IS_USER(s));
-        tmp = gen_ld16u(addr, IS_USER(s));
+        tcg_gen_qemu_ld16u(tmp, addr, IS_USER(s));
-        tmp = gen_ld32(addr, IS_USER(s));
+        tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
     tcg_gen_brcond_i32(TCG_COND_NE, tmp, cpu_exclusive_val, fail_label);
     tcg_temp_free_i32(tmp);
     if (size == 3) {
-        TCGv tmp2 = tcg_temp_new_i32();
+        TCGv_i32 tmp2 = tcg_temp_new_i32();
         tcg_gen_addi_i32(tmp2, addr, 4);
-        tmp = gen_ld32(tmp2, IS_USER(s));
+        tmp = tcg_temp_new_i32();
+        tcg_gen_qemu_ld32u(tmp, tmp2, IS_USER(s));
         tcg_temp_free_i32(tmp2);
         tcg_gen_brcond_i32(TCG_COND_NE, tmp, cpu_exclusive_high, fail_label);
         tcg_temp_free_i32(tmp);
     tmp = load_reg(s, rt);
     switch (size) {
-        gen_st8(tmp, addr, IS_USER(s));
+        tcg_gen_qemu_st8(tmp, addr, IS_USER(s));
-        gen_st16(tmp, addr, IS_USER(s));
+        tcg_gen_qemu_st16(tmp, addr, IS_USER(s));
-        gen_st32(tmp, addr, IS_USER(s));
+        tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
+    tcg_temp_free_i32(tmp);
     if (size == 3) {
         tcg_gen_addi_i32(addr, addr, 4);
         tmp = load_reg(s, rt2);
-        gen_st32(tmp, addr, IS_USER(s));
+        tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
+        tcg_temp_free_i32(tmp);
     tcg_gen_movi_i32(cpu_R[rd], 0);
     tcg_gen_br(done_label);
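For context on the two exclusive-access hunks above: strex succeeds only if the address and the remembered data still match what ldrex recorded in cpu_exclusive_addr/cpu_exclusive_val, writing 0 or 1 to Rd accordingly. The semantics being modelled, as pseudo-C (a sketch; user-mode emulation instead raises an exception and resolves the operation in the host, as the comment before gen_load_exclusive says):

    /* strex rd, rt, [addr] (sketch of the modelled semantics) */
    if (addr == exclusive_addr && load(addr) == exclusive_val) {
        store(addr, reg[rt]);
        reg[rd] = 0;          /* success */
    } else {
        reg[rd] = 1;          /* failure */
    }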
@@ -6644 +6620 @@
     tcg_gen_addi_i32(addr, addr, offset);
     tmp = load_reg(s, 14);
-    gen_st32(tmp, addr, 0);
+    tcg_gen_qemu_st32(tmp, addr, 0);
+    tcg_temp_free_i32(tmp);
     tmp = load_cpu_field(spsr);
     tcg_gen_addi_i32(addr, addr, 4);
-    gen_st32(tmp, addr, 0);
+    tcg_gen_qemu_st32(tmp, addr, 0);
+    tcg_temp_free_i32(tmp);
     if (writeback) {
         switch (amode) {
@@ -6675 +6653 @@
 static void disas_arm_insn(CPUARMState * env, DisasContext *s)
     unsigned int cond, insn, val, op1, i, shift, rm, rs, rn, rd, sh;
     TCGv_i64 tmp64;
     insn = arm_ldl_code(env, s->pc, s->bswap_code);
@@ -6790 +6768 @@
     tcg_gen_addi_i32(addr, addr, offset);
     /* Load PC into tmp and CPSR into tmp2. */
-    tmp = gen_ld32(addr, 0);
+    tmp = tcg_temp_new_i32();
+    tcg_gen_qemu_ld32u(tmp, addr, 0);
     tcg_gen_addi_i32(addr, addr, 4);
-    tmp2 = gen_ld32(addr, 0);
+    tmp2 = tcg_temp_new_i32();
+    tcg_gen_qemu_ld32u(tmp2, addr, 0);
     if (insn & (1 << 21)) {
         /* Base writeback. */
@@ -7301 +7281 @@
     tcg_gen_mulu2_i32(tmp, tmp2, tmp, tmp2);
     if (insn & (1 << 21)) { /* mult accumulate */
-        TCGv al = load_reg(s, rn);
-        TCGv ah = load_reg(s, rd);
+        TCGv_i32 al = load_reg(s, rn);
+        TCGv_i32 ah = load_reg(s, rd);
         tcg_gen_add2_i32(tmp, tmp2, tmp, tmp2, al, ah);
+        tcg_temp_free_i32(al);
+        tcg_temp_free_i32(ah);
     if (insn & (1 << 20)) {
         gen_logicq_cc(tmp, tmp2);
@@ -7321 +7301 @@
     rd = (insn >> 12) & 0xf;
     if (insn & (1 << 23)) {
         /* load/store exclusive */
+        int op2 = (insn >> 8) & 3;
         op1 = (insn >> 21) & 0x3;
+        case 0: /* lda/stl */
+        case 1: /* reserved */
+        case 2: /* ldaex/stlex */
+        case 3: /* ldrex/strex */
         addr = tcg_temp_local_new_i32();
         load_reg_var(s, addr, rn);
-        if (insn & (1 << 20)) {
+        /* Since the emulation does not have barriers,
+           the acquire/release semantics need no special
+        if (insn & (1 << 20)) {
+            tmp = tcg_temp_new_i32();
+            tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
+            tcg_gen_qemu_ld8u(tmp, addr, IS_USER(s));
+            tcg_gen_qemu_ld16u(tmp, addr, IS_USER(s));
+            store_reg(s, rd, tmp);
+            tmp = load_reg(s, rm);
+            tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
+            tcg_gen_qemu_st8(tmp, addr, IS_USER(s));
+            tcg_gen_qemu_st16(tmp, addr, IS_USER(s));
+            tcg_temp_free_i32(tmp);
+        } else if (insn & (1 << 20)) {
             case 0: /* ldrex */
                 gen_load_exclusive(s, rd, 15, addr, 2);
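The new op2/case decode above brings in the ARMv8 load-acquire/store-release forms. Since TCG executes a translation block's memory operations in order, lda/stl need no barrier and lower to ordinary loads and stores, as the inserted comment says; e.g. the lda word form is just (a sketch assembled from the added lines):

    tmp = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));   /* plain load... */
    store_reg(s, rd, tmp);                       /* ...acquire ordering is free here */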
@@ -7374 +7412 @@
    so it is good enough. */
     addr = load_reg(s, rn);
     tmp = load_reg(s, rm);
+    tmp2 = tcg_temp_new_i32();
     if (insn & (1 << 22)) {
-        tmp2 = gen_ld8u(addr, IS_USER(s));
-        gen_st8(tmp, addr, IS_USER(s));
+        tcg_gen_qemu_ld8u(tmp2, addr, IS_USER(s));
+        tcg_gen_qemu_st8(tmp, addr, IS_USER(s));
-        tmp2 = gen_ld32(addr, IS_USER(s));
-        gen_st32(tmp, addr, IS_USER(s));
+        tcg_gen_qemu_ld32u(tmp2, addr, IS_USER(s));
+        tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
+    tcg_temp_free_i32(tmp);
     tcg_temp_free_i32(addr);
     store_reg(s, rd, tmp2);
@@ -7397 +7437 @@
     address_offset = 0;
     if (insn & (1 << 20)) {
+        tmp = tcg_temp_new_i32();
-        tmp = gen_ld16u(addr, IS_USER(s));
+        tcg_gen_qemu_ld16u(tmp, addr, IS_USER(s));
-        tmp = gen_ld8s(addr, IS_USER(s));
+        tcg_gen_qemu_ld8s(tmp, addr, IS_USER(s));
-        tmp = gen_ld16s(addr, IS_USER(s));
+        tcg_gen_qemu_ld16s(tmp, addr, IS_USER(s));
@@ -7418 +7459 @@
     tmp = load_reg(s, rd);
-    gen_st32(tmp, addr, IS_USER(s));
+    tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
+    tcg_temp_free_i32(tmp);
     tcg_gen_addi_i32(addr, addr, 4);
     tmp = load_reg(s, rd + 1);
-    gen_st32(tmp, addr, IS_USER(s));
+    tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
+    tcg_temp_free_i32(tmp);
-    tmp = gen_ld32(addr, IS_USER(s));
+    tmp = tcg_temp_new_i32();
+    tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
     store_reg(s, rd, tmp);
     tcg_gen_addi_i32(addr, addr, 4);
-    tmp = gen_ld32(addr, IS_USER(s));
+    tmp = tcg_temp_new_i32();
+    tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
@@ -7764 +7810 @@
     gen_add_data_offset(s, insn, tmp2);
     if (insn & (1 << 20)) {
+        tmp = tcg_temp_new_i32();
         if (insn & (1 << 22)) {
-            tmp = gen_ld8u(tmp2, i);
+            tcg_gen_qemu_ld8u(tmp, tmp2, i);
-            tmp = gen_ld32(tmp2, i);
+            tcg_gen_qemu_ld32u(tmp, tmp2, i);
         tmp = load_reg(s, rd);
-        if (insn & (1 << 22))
-            gen_st8(tmp, tmp2, i);
-            gen_st32(tmp, tmp2, i);
+        if (insn & (1 << 22)) {
+            tcg_gen_qemu_st8(tmp, tmp2, i);
+            tcg_gen_qemu_st32(tmp, tmp2, i);
+        tcg_temp_free_i32(tmp);
     if (!(insn & (1 << 24))) {
         gen_add_data_offset(s, insn, tmp2);
@@ -7839 +7888 @@
     if (insn & (1 << i)) {
         if (insn & (1 << 20)) {
-            tmp = gen_ld32(addr, IS_USER(s));
+            tmp = tcg_temp_new_i32();
+            tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
             tmp2 = tcg_const_i32(i);
             gen_helper_set_user_reg(cpu_env, tmp2, tmp);
@@ -7960 +8011 @@
    Returns zero if the opcode is valid. */
-gen_thumb2_data_op(DisasContext *s, int op, int conds, uint32_t shifter_out, TCGv t0, TCGv t1)
+gen_thumb2_data_op(DisasContext *s, int op, int conds, uint32_t shifter_out,
+                   TCGv_i32 t0, TCGv_i32 t1)
@@ -8123 +8175 @@
     if (insn & (1 << 20)) {
-        tmp = gen_ld32(addr, IS_USER(s));
+        tmp = tcg_temp_new_i32();
+        tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
         store_reg(s, rs, tmp);
         tcg_gen_addi_i32(addr, addr, 4);
-        tmp = gen_ld32(addr, IS_USER(s));
+        tmp = tcg_temp_new_i32();
+        tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
         store_reg(s, rd, tmp);
         tmp = load_reg(s, rs);
-        gen_st32(tmp, addr, IS_USER(s));
+        tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
+        tcg_temp_free_i32(tmp);
         tcg_gen_addi_i32(addr, addr, 4);
         tmp = load_reg(s, rd);
-        gen_st32(tmp, addr, IS_USER(s));
+        tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
+        tcg_temp_free_i32(tmp);
     if (insn & (1 << 21)) {
         /* Base writeback. */
@@ -8147 +8203 @@
     } else if ((insn & (1 << 23)) == 0) {
         /* Load/store exclusive word. */
-        addr = tcg_temp_local_new();
+        addr = tcg_temp_local_new_i32();
         load_reg_var(s, addr, rn);
         tcg_gen_addi_i32(addr, addr, (insn & 0xff) << 2);
         if (insn & (1 << 20)) {
             gen_store_exclusive(s, rd, rs, 15, addr, 2);
-        tcg_temp_free(addr);
-    } else if ((insn & (1 << 6)) == 0) {
+        tcg_temp_free_i32(addr);
+    } else if ((insn & (7 << 5)) == 0) {
         /* Table Branch. */
         if (rn == 15) {
             addr = tcg_temp_new_i32();
@@ -8170 +8226 @@
         tcg_gen_add_i32(addr, addr, tmp);
         tcg_temp_free_i32(tmp);
-        tmp = gen_ld16u(addr, IS_USER(s));
+        tmp = tcg_temp_new_i32();
+        tcg_gen_qemu_ld16u(tmp, addr, IS_USER(s));
     } else { /* tbb */
         tcg_temp_free_i32(tmp);
-        tmp = gen_ld8u(addr, IS_USER(s));
+        tmp = tcg_temp_new_i32();
+        tcg_gen_qemu_ld8u(tmp, addr, IS_USER(s));
     tcg_temp_free_i32(addr);
     tcg_gen_shli_i32(tmp, tmp, 1);
     tcg_gen_addi_i32(tmp, tmp, s->pc);
     store_reg(s, 15, tmp);
@@ -8182 +8240 @@
-        /* Load/store exclusive byte/halfword/doubleword. */
+        int op2 = (insn >> 6) & 0x3;
         op = (insn >> 4) & 0x3;
             goto illegal_op;
+            /* Load/store exclusive byte/halfword/doubleword */
+            /* Load-acquire/store-release */
+            /* Load-acquire/store-release exclusive */
-        addr = tcg_temp_local_new();
+        addr = tcg_temp_local_new_i32();
         load_reg_var(s, addr, rn);
-        if (insn & (1 << 20)) {
+        if (insn & (1 << 20)) {
+            tmp = tcg_temp_new_i32();
+            tcg_gen_qemu_ld8u(tmp, addr, IS_USER(s));
+            tcg_gen_qemu_ld16u(tmp, addr, IS_USER(s));
+            tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
+            store_reg(s, rs, tmp);
+            tmp = load_reg(s, rs);
+            tcg_gen_qemu_st8(tmp, addr, IS_USER(s));
+            tcg_gen_qemu_st16(tmp, addr, IS_USER(s));
+            tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
+            tcg_temp_free_i32(tmp);
+        } else if (insn & (1 << 20)) {
             gen_load_exclusive(s, rs, rd, addr, op);
             gen_store_exclusive(s, rm, rs, rd, addr, op);
-        tcg_temp_free(addr);
+        tcg_temp_free_i32(addr);
@@ -8198 +8307 @@
     /* Load/store multiple, RFE, SRS. */
     if ((insn & (1 << 24)) == 0)
         tcg_gen_addi_i32(addr, addr, -8);
     /* Load PC into tmp and CPSR into tmp2. */
-    tmp = gen_ld32(addr, 0);
+    tmp = tcg_temp_new_i32();
+    tcg_gen_qemu_ld32u(tmp, addr, 0);
     tcg_gen_addi_i32(addr, addr, 4);
-    tmp2 = gen_ld32(addr, 0);
+    tmp2 = tcg_temp_new_i32();
+    tcg_gen_qemu_ld32u(tmp2, addr, 0);
     if (insn & (1 << 21)) {
         /* Base writeback. */
         if (insn & (1 << 24)) {
@@ -8241 +8352 @@
     tcg_gen_addi_i32(addr, addr, -offset);
-    TCGV_UNUSED(loaded_var);
+    TCGV_UNUSED_I32(loaded_var);
     for (i = 0; i < 16; i++) {
         if ((insn & (1 << i)) == 0)
         if (insn & (1 << 20)) {
-            tmp = gen_ld32(addr, IS_USER(s));
+            tmp = tcg_temp_new_i32();
+            tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
             gen_bx(s, tmp);
         } else if (i == rn) {
@@ -9036 +9149 @@
     if (insn & (1 << 20)) {
+        tmp = tcg_temp_new_i32();
-        case 0: tmp = gen_ld8u(addr, user); break;
-        case 4: tmp = gen_ld8s(addr, user); break;
-        case 1: tmp = gen_ld16u(addr, user); break;
-        case 5: tmp = gen_ld16s(addr, user); break;
-        case 2: tmp = gen_ld32(addr, user); break;
+            tcg_gen_qemu_ld8u(tmp, addr, user);
+            tcg_gen_qemu_ld8s(tmp, addr, user);
+            tcg_gen_qemu_ld16u(tmp, addr, user);
+            tcg_gen_qemu_ld16s(tmp, addr, user);
+            tcg_gen_qemu_ld32u(tmp, addr, user);
+            tcg_temp_free_i32(tmp);
             tcg_temp_free_i32(addr);
             goto illegal_op;
@@ -9055 +9180 @@
         tmp = load_reg(s, rs);
-        case 0: gen_st8(tmp, addr, user); break;
-        case 1: gen_st16(tmp, addr, user); break;
-        case 2: gen_st32(tmp, addr, user); break;
+            tcg_gen_qemu_st8(tmp, addr, user);
+            tcg_gen_qemu_st16(tmp, addr, user);
+            tcg_gen_qemu_st32(tmp, addr, user);
+            tcg_temp_free_i32(tmp);
             tcg_temp_free_i32(addr);
             goto illegal_op;
+        tcg_temp_free_i32(tmp);
     tcg_gen_addi_i32(addr, addr, imm);
@@ -9189 +9322 @@
     val &= ~(uint32_t)2;
     addr = tcg_temp_new_i32();
     tcg_gen_movi_i32(addr, val);
-    tmp = gen_ld32(addr, IS_USER(s));
+    tmp = tcg_temp_new_i32();
+    tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
     tcg_temp_free_i32(addr);
     store_reg(s, rd, tmp);
@@ -9384 +9518 @@
     tcg_gen_add_i32(addr, addr, tmp);
     tcg_temp_free_i32(tmp);
-    if (op < 3) /* store */
+    if (op < 3) { /* store */
        tmp = load_reg(s, rd);
+       tmp = tcg_temp_new_i32();
     case 0: /* str */
-        gen_st32(tmp, addr, IS_USER(s));
+        tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
     case 1: /* strh */
-        gen_st16(tmp, addr, IS_USER(s));
+        tcg_gen_qemu_st16(tmp, addr, IS_USER(s));
     case 2: /* strb */
-        gen_st8(tmp, addr, IS_USER(s));
+        tcg_gen_qemu_st8(tmp, addr, IS_USER(s));
     case 3: /* ldrsb */
-        tmp = gen_ld8s(addr, IS_USER(s));
+        tcg_gen_qemu_ld8s(tmp, addr, IS_USER(s));
     case 4: /* ldr */
-        tmp = gen_ld32(addr, IS_USER(s));
+        tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
     case 5: /* ldrh */
-        tmp = gen_ld16u(addr, IS_USER(s));
+        tcg_gen_qemu_ld16u(tmp, addr, IS_USER(s));
     case 6: /* ldrb */
-        tmp = gen_ld8u(addr, IS_USER(s));
+        tcg_gen_qemu_ld8u(tmp, addr, IS_USER(s));
     case 7: /* ldrsh */
-        tmp = gen_ld16s(addr, IS_USER(s));
+        tcg_gen_qemu_ld16s(tmp, addr, IS_USER(s));
-    if (op >= 3) /* load */
+    if (op >= 3) { /* load */
         store_reg(s, rd, tmp);
+        tcg_temp_free_i32(tmp);
     tcg_temp_free_i32(addr);
@@ -9429 +9569 @@
     if (insn & (1 << 11)) {
-        tmp = gen_ld32(addr, IS_USER(s));
+        tmp = tcg_temp_new_i32();
+        tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
         store_reg(s, rd, tmp);
         tmp = load_reg(s, rd);
-        gen_st32(tmp, addr, IS_USER(s));
+        tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
+        tcg_temp_free_i32(tmp);
     tcg_temp_free_i32(addr);
@@ -9449 +9591 @@
     if (insn & (1 << 11)) {
-        tmp = gen_ld8u(addr, IS_USER(s));
+        tmp = tcg_temp_new_i32();
+        tcg_gen_qemu_ld8u(tmp, addr, IS_USER(s));
         store_reg(s, rd, tmp);
         tmp = load_reg(s, rd);
-        gen_st8(tmp, addr, IS_USER(s));
+        tcg_gen_qemu_st8(tmp, addr, IS_USER(s));
+        tcg_temp_free_i32(tmp);
     tcg_temp_free_i32(addr);
@@ -9469 +9613 @@
     if (insn & (1 << 11)) {
-        tmp = gen_ld16u(addr, IS_USER(s));
+        tmp = tcg_temp_new_i32();
+        tcg_gen_qemu_ld16u(tmp, addr, IS_USER(s));
         store_reg(s, rd, tmp);
         tmp = load_reg(s, rd);
-        gen_st16(tmp, addr, IS_USER(s));
+        tcg_gen_qemu_st16(tmp, addr, IS_USER(s));
+        tcg_temp_free_i32(tmp);
     tcg_temp_free_i32(addr);
@@ -9488 +9634 @@
     if (insn & (1 << 11)) {
-        tmp = gen_ld32(addr, IS_USER(s));
+        tmp = tcg_temp_new_i32();
+        tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
         store_reg(s, rd, tmp);
         tmp = load_reg(s, rd);
-        gen_st32(tmp, addr, IS_USER(s));
+        tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
+        tcg_temp_free_i32(tmp);
     tcg_temp_free_i32(addr);
@@ -9558 +9706 @@
     if (insn & (1 << i)) {
         if (insn & (1 << 11)) {
-            tmp = gen_ld32(addr, IS_USER(s));
+            tmp = tcg_temp_new_i32();
+            tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
             store_reg(s, i, tmp);
             tmp = load_reg(s, i);
-            gen_st32(tmp, addr, IS_USER(s));
+            tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
+            tcg_temp_free_i32(tmp);
         /* advance to the next address. */
         tcg_gen_addi_i32(addr, addr, 4);
+    TCGV_UNUSED_I32(tmp);
     if (insn & (1 << 8)) {
         if (insn & (1 << 11)) {
-            tmp = gen_ld32(addr, IS_USER(s));
+            tmp = tcg_temp_new_i32();
+            tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
             /* don't set the pc until the rest of the instruction
                has completed */
             tmp = load_reg(s, 14);
-            gen_st32(tmp, addr, IS_USER(s));
+            tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
+            tcg_temp_free_i32(tmp);
         tcg_gen_addi_i32(addr, addr, 4);
@@ -9696 +9848 @@
     /* load/store multiple */
-    TCGV_UNUSED(loaded_var);
+    TCGv_i32 loaded_var;
+    TCGV_UNUSED_I32(loaded_var);
     rn = (insn >> 8) & 0x7;
     addr = load_reg(s, rn);
     for (i = 0; i < 8; i++) {
         if (insn & (1 << i)) {
             if (insn & (1 << 11)) {
-                tmp = gen_ld32(addr, IS_USER(s));
+                tmp = tcg_temp_new_i32();
+                tcg_gen_qemu_ld32u(tmp, addr, IS_USER(s));
                 loaded_var = tmp;
                 tmp = load_reg(s, i);
-                gen_st32(tmp, addr, IS_USER(s));
+                tcg_gen_qemu_st32(tmp, addr, IS_USER(s));
+                tcg_temp_free_i32(tmp);
             /* advance to the next address */
             tcg_gen_addi_i32(addr, addr, 4);
@@ -9783 +9937 @@
 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
    basic block 'tb'. If search_pc is TRUE, also generate PC
    information for each intermediate instruction. */
-static inline void gen_intermediate_code_internal(CPUARMState *env,
+static inline void gen_intermediate_code_internal(ARMCPU *cpu,
                                                   TranslationBlock *tb,
+    CPUState *cs = CPU(cpu);
+    CPUARMState *env = &cpu->env;
     DisasContext dc1, *dc = &dc1;
     CPUBreakpoint *bp;
     uint16_t *gen_opc_end;
@@ -9871 +10027 @@
    complications trying to do it at the end of the block. */
     if (dc->condexec_mask || dc->condexec_cond)
-        TCGv tmp = tcg_temp_new_i32();
+        TCGv_i32 tmp = tcg_temp_new_i32();
         tcg_gen_movi_i32(tmp, 0);
         store_cpu_field(tmp, condexec_bits);
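The gen_intermediate_code_internal hunk above is part of the QOM CPUState refactoring: callers now hand in the ARMCPU object, and the function derives both the common CPUState (for singlestep_enabled, used in the next hunk) and the ARM-specific env from it. The containment being relied on is roughly (an abridged sketch; the real struct lives in target-arm/cpu-qom.h):

    struct ARMCPU {
        CPUState parent_obj;   /* CPU(cpu) returns a pointer to this */
        /* ... */
        CPUARMState env;       /* arm_env_get_cpu() inverts this via container_of */
    };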
@@ -9973 +10128 @@
     /* At this stage dc->condjmp will only be set when the skipped
        instruction was a conditional branch or trap, and the PC has
        already been written. */
-    if (unlikely(env->singlestep_enabled)) {
+    if (unlikely(cs->singlestep_enabled)) {
         /* Make sure the pc is updated, and raise a debug exception. */
         if (dc->condjmp) {
             gen_set_condexec(dc);
@@ -10068 +10223 @@
 void gen_intermediate_code(CPUARMState *env, TranslationBlock *tb)
-    gen_intermediate_code_internal(env, tb, 0);
+    gen_intermediate_code_internal(arm_env_get_cpu(env), tb, false);
@@ -10073 +10228 @@
 void gen_intermediate_code_pc(CPUARMState *env, TranslationBlock *tb)
-    gen_intermediate_code_internal(env, tb, 1);
+    gen_intermediate_code_internal(arm_env_get_cpu(env), tb, true);
@@ -10078 +10233 @@
 static const char *cpu_mode_names[16] = {
     "???", "???", "???", "und", "???", "???", "???", "sys"
@@ -10083 +10238 @@
-void cpu_dump_state(CPUARMState *env, FILE *f, fprintf_function cpu_fprintf,
+void arm_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
+    ARMCPU *cpu = ARM_CPU(cs);
+    CPUARMState *env = &cpu->env;
     uint32_t psr;