113
113
#define IS_IMM (insn & (1<<13))
115
static void disas_sparc_insn(DisasContext * dc);
117
static GenOpFunc * const gen_op_movl_TN_reg[2][32] = {
188
static GenOpFunc * const gen_op_movl_reg_TN[3][32] = {
293
static GenOpFunc1 * const gen_op_movl_TN_im[3] = {
299
// Sign extending version
300
static GenOpFunc1 * const gen_op_movl_TN_sim[3] = {
306
#ifdef TARGET_SPARC64
307
#define GEN32(func, NAME) \
308
static GenOpFunc * const NAME ## _table [64] = { \
309
NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
310
NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
311
NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
312
NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
313
NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
314
NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
315
NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
316
NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
317
NAME ## 32, 0, NAME ## 34, 0, NAME ## 36, 0, NAME ## 38, 0, \
318
NAME ## 40, 0, NAME ## 42, 0, NAME ## 44, 0, NAME ## 46, 0, \
319
NAME ## 48, 0, NAME ## 50, 0, NAME ## 52, 0, NAME ## 54, 0, \
320
NAME ## 56, 0, NAME ## 58, 0, NAME ## 60, 0, NAME ## 62, 0, \
322
static inline void func(int n) \
324
NAME ## _table[n](); \
327
#define GEN32(func, NAME) \
328
static GenOpFunc *const NAME ## _table [32] = { \
329
NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
330
NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
331
NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
332
NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
333
NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
334
NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
335
NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
336
NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
338
static inline void func(int n) \
340
NAME ## _table[n](); \
344
115
/* floating point registers moves */
345
GEN32(gen_op_load_fpr_FT0, gen_op_load_fpr_FT0_fprf);
346
GEN32(gen_op_load_fpr_FT1, gen_op_load_fpr_FT1_fprf);
347
GEN32(gen_op_store_FT0_fpr, gen_op_store_FT0_fpr_fprf);
348
GEN32(gen_op_store_FT1_fpr, gen_op_store_FT1_fpr_fprf);
350
GEN32(gen_op_load_fpr_DT0, gen_op_load_fpr_DT0_fprf);
351
GEN32(gen_op_load_fpr_DT1, gen_op_load_fpr_DT1_fprf);
352
GEN32(gen_op_store_DT0_fpr, gen_op_store_DT0_fpr_fprf);
353
GEN32(gen_op_store_DT1_fpr, gen_op_store_DT1_fpr_fprf);
355
#if defined(CONFIG_USER_ONLY)
356
GEN32(gen_op_load_fpr_QT0, gen_op_load_fpr_QT0_fprf);
357
GEN32(gen_op_load_fpr_QT1, gen_op_load_fpr_QT1_fprf);
358
GEN32(gen_op_store_QT0_fpr, gen_op_store_QT0_fpr_fprf);
359
GEN32(gen_op_store_QT1_fpr, gen_op_store_QT1_fpr_fprf);
116
static void gen_op_load_fpr_DT0(unsigned int src)
118
tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
119
offsetof(CPU_DoubleU, l.upper));
120
tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
121
offsetof(CPU_DoubleU, l.lower));
124
static void gen_op_load_fpr_DT1(unsigned int src)
126
tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
127
offsetof(CPU_DoubleU, l.upper));
128
tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
129
offsetof(CPU_DoubleU, l.lower));
132
static void gen_op_store_DT0_fpr(unsigned int dst)
134
tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
135
offsetof(CPU_DoubleU, l.upper));
136
tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
137
offsetof(CPU_DoubleU, l.lower));
140
static void gen_op_load_fpr_QT0(unsigned int src)
142
tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
143
offsetof(CPU_QuadU, l.upmost));
144
tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
145
offsetof(CPU_QuadU, l.upper));
146
tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
147
offsetof(CPU_QuadU, l.lower));
148
tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
149
offsetof(CPU_QuadU, l.lowest));
152
static void gen_op_load_fpr_QT1(unsigned int src)
154
tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
155
offsetof(CPU_QuadU, l.upmost));
156
tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
157
offsetof(CPU_QuadU, l.upper));
158
tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
159
offsetof(CPU_QuadU, l.lower));
160
tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
161
offsetof(CPU_QuadU, l.lowest));
164
static void gen_op_store_QT0_fpr(unsigned int dst)
166
tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
167
offsetof(CPU_QuadU, l.upmost));
168
tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
169
offsetof(CPU_QuadU, l.upper));
170
tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
171
offsetof(CPU_QuadU, l.lower));
172
tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
173
offsetof(CPU_QuadU, l.lowest));
363
177
#ifdef CONFIG_USER_ONLY
365
179
#ifdef TARGET_SPARC64
366
180
#define hypervisor(dc) 0
368
#define gen_op_ldst(name) gen_op_##name##_raw()
370
183
#define supervisor(dc) (dc->mem_idx >= 1)
371
184
#ifdef TARGET_SPARC64
372
185
#define hypervisor(dc) (dc->mem_idx == 2)
373
#define OP_LD_TABLE(width) \
374
static GenOpFunc * const gen_op_##width[] = { \
375
&gen_op_##width##_user, \
376
&gen_op_##width##_kernel, \
377
&gen_op_##width##_hypv, \
380
#define OP_LD_TABLE(width) \
381
static GenOpFunc * const gen_op_##width[] = { \
382
&gen_op_##width##_user, \
383
&gen_op_##width##_kernel, \
386
#define gen_op_ldst(name) (*gen_op_##name[dc->mem_idx])()
389
#ifndef CONFIG_USER_ONLY
407
#ifdef TARGET_SPARC64
416
#ifdef TARGET_SPARC64
417
static inline void gen_ld_asi(int insn, int size, int sign)
422
offset = GET_FIELD(insn, 25, 31);
423
gen_op_ld_asi_reg(offset, size, sign);
425
asi = GET_FIELD(insn, 19, 26);
426
gen_op_ld_asi(asi, size, sign);
430
static inline void gen_st_asi(int insn, int size)
435
offset = GET_FIELD(insn, 25, 31);
436
gen_op_st_asi_reg(offset, size);
438
asi = GET_FIELD(insn, 19, 26);
439
gen_op_st_asi(asi, size);
443
static inline void gen_ldf_asi(int insn, int size, int rd)
448
offset = GET_FIELD(insn, 25, 31);
449
gen_op_ldf_asi_reg(offset, size, rd);
451
asi = GET_FIELD(insn, 19, 26);
452
gen_op_ldf_asi(asi, size, rd);
456
static inline void gen_stf_asi(int insn, int size, int rd)
461
offset = GET_FIELD(insn, 25, 31);
462
gen_op_stf_asi_reg(offset, size, rd);
464
asi = GET_FIELD(insn, 19, 26);
465
gen_op_stf_asi(asi, size, rd);
469
static inline void gen_swap_asi(int insn)
474
offset = GET_FIELD(insn, 25, 31);
475
gen_op_swap_asi_reg(offset);
477
asi = GET_FIELD(insn, 19, 26);
478
gen_op_swap_asi(asi);
482
static inline void gen_ldstub_asi(int insn)
487
offset = GET_FIELD(insn, 25, 31);
488
gen_op_ldstub_asi_reg(offset);
490
asi = GET_FIELD(insn, 19, 26);
491
gen_op_ldstub_asi(asi);
495
static inline void gen_ldda_asi(int insn)
500
offset = GET_FIELD(insn, 25, 31);
501
gen_op_ldda_asi_reg(offset);
503
asi = GET_FIELD(insn, 19, 26);
504
gen_op_ldda_asi(asi);
508
static inline void gen_stda_asi(int insn)
513
offset = GET_FIELD(insn, 25, 31);
514
gen_op_stda_asi_reg(offset);
516
asi = GET_FIELD(insn, 19, 26);
517
gen_op_stda_asi(asi);
521
static inline void gen_cas_asi(int insn)
526
offset = GET_FIELD(insn, 25, 31);
527
gen_op_cas_asi_reg(offset);
529
asi = GET_FIELD(insn, 19, 26);
534
static inline void gen_casx_asi(int insn)
539
offset = GET_FIELD(insn, 25, 31);
540
gen_op_casx_asi_reg(offset);
542
asi = GET_FIELD(insn, 19, 26);
543
gen_op_casx_asi(asi);
547
#elif !defined(CONFIG_USER_ONLY)
549
static inline void gen_ld_asi(int insn, int size, int sign)
553
asi = GET_FIELD(insn, 19, 26);
554
gen_op_ld_asi(asi, size, sign);
557
static inline void gen_st_asi(int insn, int size)
561
asi = GET_FIELD(insn, 19, 26);
562
gen_op_st_asi(asi, size);
565
static inline void gen_ldstub_asi(int insn)
569
asi = GET_FIELD(insn, 19, 26);
570
gen_op_ldstub_asi(asi);
573
static inline void gen_swap_asi(int insn)
577
asi = GET_FIELD(insn, 19, 26);
578
gen_op_swap_asi(asi);
581
static inline void gen_ldda_asi(int insn)
585
asi = GET_FIELD(insn, 19, 26);
586
gen_op_ld_asi(asi, 8, 0);
589
static inline void gen_stda_asi(int insn)
593
asi = GET_FIELD(insn, 19, 26);
594
gen_op_st_asi(asi, 8);
598
/* Load unsigned immediate imm into dyngen temporary T[reg]. */
static inline void gen_movl_imm_TN(int reg, uint32_t imm)
{
    gen_op_movl_TN_im[reg](imm);
}
603
/* Load unsigned immediate val into temporary T1. */
static inline void gen_movl_imm_T1(uint32_t val)
{
    gen_movl_imm_TN(1, val);
}
608
/* Load unsigned immediate val into temporary T0. */
static inline void gen_movl_imm_T0(uint32_t val)
{
    gen_movl_imm_TN(0, val);
}
613
/* Load sign-extended immediate imm into dyngen temporary T[reg]. */
static inline void gen_movl_simm_TN(int reg, int32_t imm)
{
    gen_op_movl_TN_sim[reg](imm);
}
618
/* Load sign-extended immediate val into temporary T1. */
static inline void gen_movl_simm_T1(int32_t val)
{
    gen_movl_simm_TN(1, val);
}
623
/* Load sign-extended immediate val into temporary T0. */
static inline void gen_movl_simm_T0(int32_t val)
{
    gen_movl_simm_TN(0, val);
}
628
static inline void gen_movl_reg_TN(int reg, int t)
631
gen_op_movl_reg_TN[t][reg] ();
633
gen_movl_imm_TN(t, 0);
636
/* Copy SPARC register reg into temporary T0. */
static inline void gen_movl_reg_T0(int reg)
{
    gen_movl_reg_TN(reg, 0);
}
641
/* Copy SPARC register reg into temporary T1. */
static inline void gen_movl_reg_T1(int reg)
{
    gen_movl_reg_TN(reg, 1);
}
646
/* Copy SPARC register reg into temporary T2. */
static inline void gen_movl_reg_T2(int reg)
{
    gen_movl_reg_TN(reg, 2);
}
651
static inline void gen_movl_TN_reg(int reg, int t)
654
gen_op_movl_TN_reg[t][reg] ();
657
/* Store temporary T0 into SPARC register reg. */
static inline void gen_movl_T0_reg(int reg)
{
    gen_movl_TN_reg(reg, 0);
}
662
/* Store temporary T1 into SPARC register reg. */
static inline void gen_movl_T1_reg(int reg)
{
    gen_movl_TN_reg(reg, 1);
}
667
static inline void gen_jmp_im(target_ulong pc)
669
#ifdef TARGET_SPARC64
670
if (pc == (uint32_t)pc) {
673
gen_op_jmp_im64(pc >> 32, pc);
680
static inline void gen_movl_npc_im(target_ulong npc)
682
#ifdef TARGET_SPARC64
683
if (npc == (uint32_t)npc) {
684
gen_op_movl_npc_im(npc);
686
gen_op_movq_npc_im64(npc >> 32, npc);
689
gen_op_movl_npc_im(npc);
190
#ifdef TARGET_SPARC64
192
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
194
#define AM_CHECK(dc) (1)
198
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
200
#ifdef TARGET_SPARC64
202
tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
206
static inline void gen_movl_reg_TN(int reg, TCGv tn)
209
tcg_gen_movi_tl(tn, 0);
211
tcg_gen_mov_tl(tn, cpu_gregs[reg]);
213
tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
217
static inline void gen_movl_TN_reg(int reg, TCGv tn)
222
tcg_gen_mov_tl(cpu_gregs[reg], tn);
224
tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
693
228
static inline void gen_goto_tb(DisasContext *s, int tb_num,
699
234
if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
700
235
(npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
701
236
/* jump to same page: we can use a direct jump */
703
gen_op_goto_tb0(TBPARAM(tb));
705
gen_op_goto_tb1(TBPARAM(tb));
707
gen_movl_npc_im(npc);
708
gen_op_movl_T0_im((long)tb + tb_num);
237
tcg_gen_goto_tb(tb_num);
238
tcg_gen_movi_tl(cpu_pc, pc);
239
tcg_gen_movi_tl(cpu_npc, npc);
240
tcg_gen_exit_tb((long)tb + tb_num);
711
242
/* jump to another page: currently not optimized */
713
gen_movl_npc_im(npc);
243
tcg_gen_movi_tl(cpu_pc, pc);
244
tcg_gen_movi_tl(cpu_npc, npc);
250
/* Extract the PSR N (negative) flag from src into reg as 0 or 1. */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
257
/* Extract the PSR Z (zero) flag from src into reg as 0 or 1. */
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
264
/* Extract the PSR V (overflow) flag from src into reg as 0 or 1. */
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
271
/* Extract the PSR C (carry) flag from src into reg as 0 or 1. */
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
278
static inline void gen_cc_clear_icc(void)
280
tcg_gen_movi_i32(cpu_psr, 0);
283
#ifdef TARGET_SPARC64
284
static inline void gen_cc_clear_xcc(void)
286
tcg_gen_movi_i32(cpu_xcc, 0);
292
env->psr |= PSR_ZERO;
293
if ((int32_t) T0 < 0)
296
static inline void gen_cc_NZ_icc(TCGv dst)
301
l1 = gen_new_label();
302
l2 = gen_new_label();
303
r_temp = tcg_temp_new();
304
tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
305
tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
306
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
308
tcg_gen_ext32s_tl(r_temp, dst);
309
tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
310
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
312
tcg_temp_free(r_temp);
315
#ifdef TARGET_SPARC64
316
static inline void gen_cc_NZ_xcc(TCGv dst)
320
l1 = gen_new_label();
321
l2 = gen_new_label();
322
tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
323
tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
325
tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
326
tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
333
env->psr |= PSR_CARRY;
335
static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
337
TCGv r_temp1, r_temp2;
340
l1 = gen_new_label();
341
r_temp1 = tcg_temp_new();
342
r_temp2 = tcg_temp_new();
343
tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
344
tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
345
tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
346
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
348
tcg_temp_free(r_temp1);
349
tcg_temp_free(r_temp2);
352
#ifdef TARGET_SPARC64
353
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
357
l1 = gen_new_label();
358
tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
359
tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
365
if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
368
static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
372
r_temp = tcg_temp_new();
373
tcg_gen_xor_tl(r_temp, src1, src2);
374
tcg_gen_not_tl(r_temp, r_temp);
375
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
376
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
377
tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
378
tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
379
tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
380
tcg_temp_free(r_temp);
381
tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
384
#ifdef TARGET_SPARC64
385
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
389
r_temp = tcg_temp_new();
390
tcg_gen_xor_tl(r_temp, src1, src2);
391
tcg_gen_not_tl(r_temp, r_temp);
392
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
393
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
394
tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
395
tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
396
tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
397
tcg_temp_free(r_temp);
398
tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
402
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
408
l1 = gen_new_label();
410
r_temp = tcg_temp_new();
411
tcg_gen_xor_tl(r_temp, src1, src2);
412
tcg_gen_not_tl(r_temp, r_temp);
413
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
414
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
415
tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
416
tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
417
r_const = tcg_const_i32(TT_TOVF);
418
gen_helper_raise_exception(r_const);
419
tcg_temp_free_i32(r_const);
421
tcg_temp_free(r_temp);
424
static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
428
l1 = gen_new_label();
429
tcg_gen_or_tl(cpu_tmp0, src1, src2);
430
tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
431
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
432
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
436
static inline void gen_tag_tv(TCGv src1, TCGv src2)
441
l1 = gen_new_label();
442
tcg_gen_or_tl(cpu_tmp0, src1, src2);
443
tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
444
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
445
r_const = tcg_const_i32(TT_TOVF);
446
gen_helper_raise_exception(r_const);
447
tcg_temp_free_i32(r_const);
451
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
453
tcg_gen_mov_tl(cpu_cc_src, src1);
454
tcg_gen_mov_tl(cpu_cc_src2, src2);
455
tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
457
gen_cc_NZ_icc(cpu_cc_dst);
458
gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
459
gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
460
#ifdef TARGET_SPARC64
462
gen_cc_NZ_xcc(cpu_cc_dst);
463
gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
464
gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
466
tcg_gen_mov_tl(dst, cpu_cc_dst);
469
static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
471
tcg_gen_mov_tl(cpu_cc_src, src1);
472
tcg_gen_mov_tl(cpu_cc_src2, src2);
473
gen_mov_reg_C(cpu_tmp0, cpu_psr);
474
tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
476
gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
477
#ifdef TARGET_SPARC64
479
gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
481
tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
482
gen_cc_NZ_icc(cpu_cc_dst);
483
gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
484
gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
485
#ifdef TARGET_SPARC64
486
gen_cc_NZ_xcc(cpu_cc_dst);
487
gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
488
gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
490
tcg_gen_mov_tl(dst, cpu_cc_dst);
493
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
495
tcg_gen_mov_tl(cpu_cc_src, src1);
496
tcg_gen_mov_tl(cpu_cc_src2, src2);
497
tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
499
gen_cc_NZ_icc(cpu_cc_dst);
500
gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
501
gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
502
gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
503
#ifdef TARGET_SPARC64
505
gen_cc_NZ_xcc(cpu_cc_dst);
506
gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
507
gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
509
tcg_gen_mov_tl(dst, cpu_cc_dst);
512
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
514
tcg_gen_mov_tl(cpu_cc_src, src1);
515
tcg_gen_mov_tl(cpu_cc_src2, src2);
516
gen_tag_tv(cpu_cc_src, cpu_cc_src2);
517
tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
518
gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
520
gen_cc_NZ_icc(cpu_cc_dst);
521
gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
522
#ifdef TARGET_SPARC64
524
gen_cc_NZ_xcc(cpu_cc_dst);
525
gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
526
gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
528
tcg_gen_mov_tl(dst, cpu_cc_dst);
533
env->psr |= PSR_CARRY;
535
static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
537
TCGv r_temp1, r_temp2;
540
l1 = gen_new_label();
541
r_temp1 = tcg_temp_new();
542
r_temp2 = tcg_temp_new();
543
tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
544
tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
545
tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
546
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
548
tcg_temp_free(r_temp1);
549
tcg_temp_free(r_temp2);
552
#ifdef TARGET_SPARC64
553
static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
557
l1 = gen_new_label();
558
tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
559
tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
565
if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
568
static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
572
r_temp = tcg_temp_new();
573
tcg_gen_xor_tl(r_temp, src1, src2);
574
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
575
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
576
tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
577
tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
578
tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
579
tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
580
tcg_temp_free(r_temp);
583
#ifdef TARGET_SPARC64
584
static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
588
r_temp = tcg_temp_new();
589
tcg_gen_xor_tl(r_temp, src1, src2);
590
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
591
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
592
tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
593
tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
594
tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
595
tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
596
tcg_temp_free(r_temp);
600
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
606
l1 = gen_new_label();
608
r_temp = tcg_temp_new();
609
tcg_gen_xor_tl(r_temp, src1, src2);
610
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
611
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
612
tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
613
tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
614
r_const = tcg_const_i32(TT_TOVF);
615
gen_helper_raise_exception(r_const);
616
tcg_temp_free_i32(r_const);
618
tcg_temp_free(r_temp);
621
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
623
tcg_gen_mov_tl(cpu_cc_src, src1);
624
tcg_gen_mov_tl(cpu_cc_src2, src2);
625
tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
627
gen_cc_NZ_icc(cpu_cc_dst);
628
gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
629
gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
630
#ifdef TARGET_SPARC64
632
gen_cc_NZ_xcc(cpu_cc_dst);
633
gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
634
gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
636
tcg_gen_mov_tl(dst, cpu_cc_dst);
639
static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
641
tcg_gen_mov_tl(cpu_cc_src, src1);
642
tcg_gen_mov_tl(cpu_cc_src2, src2);
643
gen_mov_reg_C(cpu_tmp0, cpu_psr);
644
tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
646
gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
647
#ifdef TARGET_SPARC64
649
gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
651
tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
652
gen_cc_NZ_icc(cpu_cc_dst);
653
gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
654
gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
655
#ifdef TARGET_SPARC64
656
gen_cc_NZ_xcc(cpu_cc_dst);
657
gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
658
gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
660
tcg_gen_mov_tl(dst, cpu_cc_dst);
663
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
665
tcg_gen_mov_tl(cpu_cc_src, src1);
666
tcg_gen_mov_tl(cpu_cc_src2, src2);
667
tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
669
gen_cc_NZ_icc(cpu_cc_dst);
670
gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
671
gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
672
gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
673
#ifdef TARGET_SPARC64
675
gen_cc_NZ_xcc(cpu_cc_dst);
676
gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
677
gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
679
tcg_gen_mov_tl(dst, cpu_cc_dst);
682
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
684
tcg_gen_mov_tl(cpu_cc_src, src1);
685
tcg_gen_mov_tl(cpu_cc_src2, src2);
686
gen_tag_tv(cpu_cc_src, cpu_cc_src2);
687
tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
688
gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
690
gen_cc_NZ_icc(cpu_cc_dst);
691
gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
692
#ifdef TARGET_SPARC64
694
gen_cc_NZ_xcc(cpu_cc_dst);
695
gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
696
gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
698
tcg_gen_mov_tl(dst, cpu_cc_dst);
701
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
706
l1 = gen_new_label();
707
r_temp = tcg_temp_new();
713
tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
714
tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
715
tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
716
tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
717
tcg_gen_movi_tl(cpu_cc_src2, 0);
721
// env->y = (b2 << 31) | (env->y >> 1);
722
tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
723
tcg_gen_shli_tl(r_temp, r_temp, 31);
724
tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
725
tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
726
tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
727
tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
730
gen_mov_reg_N(cpu_tmp0, cpu_psr);
731
gen_mov_reg_V(r_temp, cpu_psr);
732
tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
733
tcg_temp_free(r_temp);
735
// T0 = (b1 << 31) | (T0 >> 1);
737
tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
738
tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
739
tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
741
/* do addition and update flags */
742
tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
745
gen_cc_NZ_icc(cpu_cc_dst);
746
gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
747
gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
748
tcg_gen_mov_tl(dst, cpu_cc_dst);
751
/* Unsigned 32x32->64 multiply: dst = src1 * src2, high 32 bits also
   written to %y.  On SPARC64 dst keeps the full 64-bit product. */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* %y receives the upper 32 bits of the product */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
774
/* Signed 32x32->64 multiply: dst = src1 * src2, high 32 bits also
   written to %y.  On SPARC64 dst keeps the full 64-bit product. */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* %y receives the upper 32 bits of the product */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
797
#ifdef TARGET_SPARC64
798
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
803
l1 = gen_new_label();
804
tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
805
r_const = tcg_const_i32(TT_DIV_ZERO);
806
gen_helper_raise_exception(r_const);
807
tcg_temp_free_i32(r_const);
811
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
815
l1 = gen_new_label();
816
l2 = gen_new_label();
817
tcg_gen_mov_tl(cpu_cc_src, src1);
818
tcg_gen_mov_tl(cpu_cc_src2, src2);
819
gen_trap_ifdivzero_tl(cpu_cc_src2);
820
tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
821
tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
822
tcg_gen_movi_i64(dst, INT64_MIN);
825
tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
830
static inline void gen_op_div_cc(TCGv dst)
834
tcg_gen_mov_tl(cpu_cc_dst, dst);
836
gen_cc_NZ_icc(cpu_cc_dst);
837
l1 = gen_new_label();
838
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
839
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
843
static inline void gen_op_logic_cc(TCGv dst)
845
tcg_gen_mov_tl(cpu_cc_dst, dst);
848
gen_cc_NZ_icc(cpu_cc_dst);
849
#ifdef TARGET_SPARC64
851
gen_cc_NZ_xcc(cpu_cc_dst);
856
/* Branch always: condition is constant 1. */
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}
862
/* Branch on equal: Z. */
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}
868
/* Branch on less or equal: Z | (N ^ V). */
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}
878
/* Branch on less: N ^ V. */
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}
886
/* Branch on less or equal unsigned: C | Z. */
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}
894
/* Branch on carry set: C. */
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}
900
/* Branch on overflow set: V. */
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}
906
/* Branch never: condition is constant 0. */
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}
912
/* Branch on negative: N. */
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}
918
/* Branch on not equal: !Z. */
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
925
/* Branch on greater: !(Z | (N ^ V)). */
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
936
/* Branch on greater or equal: !(N ^ V). */
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
945
/* Branch on greater unsigned: !(C | Z). */
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
954
/* Branch on carry clear: !C. */
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
961
/* Branch on positive: !N. */
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
968
/* Branch on overflow clear: !V. */
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
975
FPSR bit field FCC1 | FCC0:
981
/* Extract FCC0 of the selected FSR condition-code field into reg (0/1). */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
988
/* Extract FCC1 of the selected FSR condition-code field into reg (0/1). */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
996
/* FP branch on not equal, FCC in {1,2,3}: FCC0 | FCC1. */
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}
1004
// 1 or 2: FCC0 ^ FCC1
1005
/* FP branch on less or greater, FCC in {1,2}: FCC0 ^ FCC1. */
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}
1014
/* FP branch on unordered or less, FCC in {1,3}: FCC0. */
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}
1021
/* FP branch on less, FCC == 1: FCC0 & !FCC1. */
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
1031
/* FP branch on unordered or greater, FCC in {2,3}: FCC1. */
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}
1038
/* FP branch on greater, FCC == 2: !FCC0 & FCC1. */
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
1048
/* FP branch on unordered, FCC == 3: FCC0 & FCC1. */
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
1056
// 0: !(FCC0 | FCC1)
1057
/* FP branch on equal, FCC == 0: !(FCC0 | FCC1). */
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1066
// 0 or 3: !(FCC0 ^ FCC1)
1067
/* FP branch on unordered or equal, FCC in {0,3}: !(FCC0 ^ FCC1). */
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1077
/* FP branch on greater or equal, FCC in {0,2}: !FCC0. */
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1084
// !1: !(FCC0 & !FCC1)
1085
/* FP branch on unordered or greater or equal, FCC != 1: !(FCC0 & !FCC1). */
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1096
/* FP branch on less or equal, FCC in {0,1}: !FCC1. */
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1103
// !2: !(!FCC0 & FCC1)
1104
/* FP branch on unordered or less or equal, FCC != 2: !(!FCC0 & FCC1). */
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1114
// !3: !(FCC0 & FCC1)
1115
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1116
unsigned int fcc_offset)
1118
gen_mov_reg_FCC0(dst, src, fcc_offset);
1119
gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1120
tcg_gen_and_tl(dst, dst, cpu_tmp0);
1121
tcg_gen_xori_tl(dst, dst, 0x1);
719
1124
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1125
target_ulong pc2, TCGv r_cond)
724
1129
l1 = gen_new_label();
726
gen_op_jz_T2_label(l1);
1131
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
728
1133
gen_goto_tb(dc, 0, pc1, pc1 + 4);
1649
static inline void gen_op_clear_ieee_excp_and_FTT(void)
1651
tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1654
static inline void gen_clear_float_exceptions(void)
1656
gen_helper_clear_float_exceptions();
1660
#ifdef TARGET_SPARC64
1661
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1667
r_asi = tcg_temp_new_i32();
1668
tcg_gen_mov_i32(r_asi, cpu_asi);
1670
asi = GET_FIELD(insn, 19, 26);
1671
r_asi = tcg_const_i32(asi);
1676
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1679
TCGv_i32 r_asi, r_size, r_sign;
1681
r_asi = gen_get_asi(insn, addr);
1682
r_size = tcg_const_i32(size);
1683
r_sign = tcg_const_i32(sign);
1684
gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1685
tcg_temp_free_i32(r_sign);
1686
tcg_temp_free_i32(r_size);
1687
tcg_temp_free_i32(r_asi);
1690
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1692
TCGv_i32 r_asi, r_size;
1694
r_asi = gen_get_asi(insn, addr);
1695
r_size = tcg_const_i32(size);
1696
gen_helper_st_asi(addr, src, r_asi, r_size);
1697
tcg_temp_free_i32(r_size);
1698
tcg_temp_free_i32(r_asi);
1701
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1703
TCGv_i32 r_asi, r_size, r_rd;
1705
r_asi = gen_get_asi(insn, addr);
1706
r_size = tcg_const_i32(size);
1707
r_rd = tcg_const_i32(rd);
1708
gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1709
tcg_temp_free_i32(r_rd);
1710
tcg_temp_free_i32(r_size);
1711
tcg_temp_free_i32(r_asi);
1714
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1716
TCGv_i32 r_asi, r_size, r_rd;
1718
r_asi = gen_get_asi(insn, addr);
1719
r_size = tcg_const_i32(size);
1720
r_rd = tcg_const_i32(rd);
1721
gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1722
tcg_temp_free_i32(r_rd);
1723
tcg_temp_free_i32(r_size);
1724
tcg_temp_free_i32(r_asi);
1727
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1729
TCGv_i32 r_asi, r_size, r_sign;
1731
r_asi = gen_get_asi(insn, addr);
1732
r_size = tcg_const_i32(4);
1733
r_sign = tcg_const_i32(0);
1734
gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1735
tcg_temp_free_i32(r_sign);
1736
gen_helper_st_asi(addr, dst, r_asi, r_size);
1737
tcg_temp_free_i32(r_size);
1738
tcg_temp_free_i32(r_asi);
1739
tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1742
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1744
TCGv_i32 r_asi, r_rd;
1746
r_asi = gen_get_asi(insn, addr);
1747
r_rd = tcg_const_i32(rd);
1748
gen_helper_ldda_asi(addr, r_asi, r_rd);
1749
tcg_temp_free_i32(r_rd);
1750
tcg_temp_free_i32(r_asi);
1753
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1755
TCGv_i32 r_asi, r_size;
1757
gen_movl_reg_TN(rd + 1, cpu_tmp0);
1758
tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1759
r_asi = gen_get_asi(insn, addr);
1760
r_size = tcg_const_i32(8);
1761
gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1762
tcg_temp_free_i32(r_size);
1763
tcg_temp_free_i32(r_asi);
1766
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1772
r_val1 = tcg_temp_new();
1773
gen_movl_reg_TN(rd, r_val1);
1774
r_asi = gen_get_asi(insn, addr);
1775
gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1776
tcg_temp_free_i32(r_asi);
1777
tcg_temp_free(r_val1);
1780
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1785
gen_movl_reg_TN(rd, cpu_tmp64);
1786
r_asi = gen_get_asi(insn, addr);
1787
gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1788
tcg_temp_free_i32(r_asi);
1791
#elif !defined(CONFIG_USER_ONLY)
1793
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1796
TCGv_i32 r_asi, r_size, r_sign;
1798
r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1799
r_size = tcg_const_i32(size);
1800
r_sign = tcg_const_i32(sign);
1801
gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1802
tcg_temp_free(r_sign);
1803
tcg_temp_free(r_size);
1804
tcg_temp_free(r_asi);
1805
tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1808
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1810
TCGv_i32 r_asi, r_size;
1812
tcg_gen_extu_tl_i64(cpu_tmp64, src);
1813
r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1814
r_size = tcg_const_i32(size);
1815
gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1816
tcg_temp_free(r_size);
1817
tcg_temp_free(r_asi);
1820
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1822
TCGv_i32 r_asi, r_size, r_sign;
1825
r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1826
r_size = tcg_const_i32(4);
1827
r_sign = tcg_const_i32(0);
1828
gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1829
tcg_temp_free(r_sign);
1830
r_val = tcg_temp_new_i64();
1831
tcg_gen_extu_tl_i64(r_val, dst);
1832
gen_helper_st_asi(addr, r_val, r_asi, r_size);
1833
tcg_temp_free_i64(r_val);
1834
tcg_temp_free(r_size);
1835
tcg_temp_free(r_asi);
1836
tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1839
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1841
TCGv_i32 r_asi, r_size, r_sign;
1843
r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1844
r_size = tcg_const_i32(8);
1845
r_sign = tcg_const_i32(0);
1846
gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1847
tcg_temp_free(r_sign);
1848
tcg_temp_free(r_size);
1849
tcg_temp_free(r_asi);
1850
tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1851
gen_movl_TN_reg(rd + 1, cpu_tmp0);
1852
tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1853
tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1854
gen_movl_TN_reg(rd, hi);
1857
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1859
TCGv_i32 r_asi, r_size;
1861
gen_movl_reg_TN(rd + 1, cpu_tmp0);
1862
tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1863
r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1864
r_size = tcg_const_i32(8);
1865
gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1866
tcg_temp_free(r_size);
1867
tcg_temp_free(r_asi);
1871
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1872
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1875
TCGv_i32 r_asi, r_size;
1877
gen_ld_asi(dst, addr, insn, 1, 0);
1879
r_val = tcg_const_i64(0xffULL);
1880
r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1881
r_size = tcg_const_i32(1);
1882
gen_helper_st_asi(addr, r_val, r_asi, r_size);
1883
tcg_temp_free_i32(r_size);
1884
tcg_temp_free_i32(r_asi);
1885
tcg_temp_free_i64(r_val);
1889
static inline TCGv get_src1(unsigned int insn, TCGv def)
1894
rs1 = GET_FIELD(insn, 13, 17);
1896
r_rs1 = tcg_const_tl(0); // XXX how to free?
1898
r_rs1 = cpu_gregs[rs1];
1900
tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1904
static inline TCGv get_src2(unsigned int insn, TCGv def)
1909
if (IS_IMM) { /* immediate */
1910
rs2 = GET_FIELDs(insn, 19, 31);
1911
r_rs2 = tcg_const_tl((int)rs2); // XXX how to free?
1912
} else { /* register */
1913
rs2 = GET_FIELD(insn, 27, 31);
1915
r_rs2 = tcg_const_tl(0); // XXX how to free?
1917
r_rs2 = cpu_gregs[rs2];
1919
tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1924
#define CHECK_IU_FEATURE(dc, FEATURE) \
1925
if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1927
#define CHECK_FPU_FEATURE(dc, FEATURE) \
1928
if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1115
1931
/* before an instruction, dc->pc must be static */
1116
1932
static void disas_sparc_insn(DisasContext * dc)
1118
1934
unsigned int insn, opc, rs1, rs2, rd;
1936
if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1937
tcg_gen_debug_insn_start(dc->pc);
1120
1938
insn = ldl_code(dc->pc);
1121
1939
opc = GET_FIELD(insn, 0, 1);
1123
1941
rd = GET_FIELD(insn, 2, 6);
1943
cpu_src1 = tcg_temp_new(); // const
1944
cpu_src2 = tcg_temp_new(); // const
1125
1947
case 0: /* branches/sethi */
1483
2388
xop = GET_FIELD(insn, 18, 26);
1485
2390
case 0x1: /* fmovs */
1486
gen_op_load_fpr_FT0(rs2);
1487
gen_op_store_FT0_fpr(rd);
2391
tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
1489
2393
case 0x5: /* fnegs */
1490
gen_op_load_fpr_FT1(rs2);
1492
gen_op_store_FT0_fpr(rd);
2394
gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
1494
2396
case 0x9: /* fabss */
1495
gen_op_load_fpr_FT1(rs2);
1497
gen_op_store_FT0_fpr(rd);
2397
gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
1499
2399
case 0x29: /* fsqrts */
1500
gen_op_load_fpr_FT1(rs2);
1502
gen_op_store_FT0_fpr(rd);
2400
CHECK_FPU_FEATURE(dc, FSQRT);
2401
gen_clear_float_exceptions();
2402
gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2403
gen_helper_check_ieee_exceptions();
2404
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
1504
2406
case 0x2a: /* fsqrtd */
2407
CHECK_FPU_FEATURE(dc, FSQRT);
1505
2408
gen_op_load_fpr_DT1(DFPREG(rs2));
2409
gen_clear_float_exceptions();
2410
gen_helper_fsqrtd();
2411
gen_helper_check_ieee_exceptions();
1507
2412
gen_op_store_DT0_fpr(DFPREG(rd));
1509
2414
case 0x2b: /* fsqrtq */
1510
#if defined(CONFIG_USER_ONLY)
2415
CHECK_FPU_FEATURE(dc, FLOAT128);
1511
2416
gen_op_load_fpr_QT1(QFPREG(rs2));
2417
gen_clear_float_exceptions();
2418
gen_helper_fsqrtq();
2419
gen_helper_check_ieee_exceptions();
1513
2420
gen_op_store_QT0_fpr(QFPREG(rd));
1519
gen_op_load_fpr_FT0(rs1);
1520
gen_op_load_fpr_FT1(rs2);
1522
gen_op_store_FT0_fpr(rd);
2422
case 0x41: /* fadds */
2423
gen_clear_float_exceptions();
2424
gen_helper_fadds(cpu_tmp32,
2425
cpu_fpr[rs1], cpu_fpr[rs2]);
2426
gen_helper_check_ieee_exceptions();
2427
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
1525
2430
gen_op_load_fpr_DT0(DFPREG(rs1));
1526
2431
gen_op_load_fpr_DT1(DFPREG(rs2));
2432
gen_clear_float_exceptions();
2434
gen_helper_check_ieee_exceptions();
1528
2435
gen_op_store_DT0_fpr(DFPREG(rd));
1530
2437
case 0x43: /* faddq */
1531
#if defined(CONFIG_USER_ONLY)
2438
CHECK_FPU_FEATURE(dc, FLOAT128);
1532
2439
gen_op_load_fpr_QT0(QFPREG(rs1));
1533
2440
gen_op_load_fpr_QT1(QFPREG(rs2));
2441
gen_clear_float_exceptions();
2443
gen_helper_check_ieee_exceptions();
1535
2444
gen_op_store_QT0_fpr(QFPREG(rd));
1541
gen_op_load_fpr_FT0(rs1);
1542
gen_op_load_fpr_FT1(rs2);
1544
gen_op_store_FT0_fpr(rd);
2446
case 0x45: /* fsubs */
2447
gen_clear_float_exceptions();
2448
gen_helper_fsubs(cpu_tmp32,
2449
cpu_fpr[rs1], cpu_fpr[rs2]);
2450
gen_helper_check_ieee_exceptions();
2451
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
1547
2454
gen_op_load_fpr_DT0(DFPREG(rs1));
1548
2455
gen_op_load_fpr_DT1(DFPREG(rs2));
2456
gen_clear_float_exceptions();
2458
gen_helper_check_ieee_exceptions();
1550
2459
gen_op_store_DT0_fpr(DFPREG(rd));
1552
2461
case 0x47: /* fsubq */
1553
#if defined(CONFIG_USER_ONLY)
2462
CHECK_FPU_FEATURE(dc, FLOAT128);
1554
2463
gen_op_load_fpr_QT0(QFPREG(rs1));
1555
2464
gen_op_load_fpr_QT1(QFPREG(rs2));
2465
gen_clear_float_exceptions();
2467
gen_helper_check_ieee_exceptions();
1557
2468
gen_op_store_QT0_fpr(QFPREG(rd));
1563
gen_op_load_fpr_FT0(rs1);
1564
gen_op_load_fpr_FT1(rs2);
1566
gen_op_store_FT0_fpr(rd);
2470
case 0x49: /* fmuls */
2471
CHECK_FPU_FEATURE(dc, FMUL);
2472
gen_clear_float_exceptions();
2473
gen_helper_fmuls(cpu_tmp32,
2474
cpu_fpr[rs1], cpu_fpr[rs2]);
2475
gen_helper_check_ieee_exceptions();
2476
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2478
case 0x4a: /* fmuld */
2479
CHECK_FPU_FEATURE(dc, FMUL);
1569
2480
gen_op_load_fpr_DT0(DFPREG(rs1));
1570
2481
gen_op_load_fpr_DT1(DFPREG(rs2));
2482
gen_clear_float_exceptions();
2484
gen_helper_check_ieee_exceptions();
1572
2485
gen_op_store_DT0_fpr(DFPREG(rd));
1574
2487
case 0x4b: /* fmulq */
1575
#if defined(CONFIG_USER_ONLY)
2488
CHECK_FPU_FEATURE(dc, FLOAT128);
2489
CHECK_FPU_FEATURE(dc, FMUL);
1576
2490
gen_op_load_fpr_QT0(QFPREG(rs1));
1577
2491
gen_op_load_fpr_QT1(QFPREG(rs2));
2492
gen_clear_float_exceptions();
2494
gen_helper_check_ieee_exceptions();
1579
2495
gen_op_store_QT0_fpr(QFPREG(rd));
1585
gen_op_load_fpr_FT0(rs1);
1586
gen_op_load_fpr_FT1(rs2);
1588
gen_op_store_FT0_fpr(rd);
2497
case 0x4d: /* fdivs */
2498
gen_clear_float_exceptions();
2499
gen_helper_fdivs(cpu_tmp32,
2500
cpu_fpr[rs1], cpu_fpr[rs2]);
2501
gen_helper_check_ieee_exceptions();
2502
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
1591
2505
gen_op_load_fpr_DT0(DFPREG(rs1));
1592
2506
gen_op_load_fpr_DT1(DFPREG(rs2));
2507
gen_clear_float_exceptions();
2509
gen_helper_check_ieee_exceptions();
1594
2510
gen_op_store_DT0_fpr(DFPREG(rd));
1596
2512
case 0x4f: /* fdivq */
1597
#if defined(CONFIG_USER_ONLY)
2513
CHECK_FPU_FEATURE(dc, FLOAT128);
1598
2514
gen_op_load_fpr_QT0(QFPREG(rs1));
1599
2515
gen_op_load_fpr_QT1(QFPREG(rs2));
2516
gen_clear_float_exceptions();
2518
gen_helper_check_ieee_exceptions();
1601
2519
gen_op_store_QT0_fpr(QFPREG(rd));
1607
gen_op_load_fpr_FT0(rs1);
1608
gen_op_load_fpr_FT1(rs2);
2521
case 0x69: /* fsmuld */
2522
CHECK_FPU_FEATURE(dc, FSMULD);
2523
gen_clear_float_exceptions();
2524
gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2525
gen_helper_check_ieee_exceptions();
1610
2526
gen_op_store_DT0_fpr(DFPREG(rd));
1612
2528
case 0x6e: /* fdmulq */
1613
#if defined(CONFIG_USER_ONLY)
2529
CHECK_FPU_FEATURE(dc, FLOAT128);
1614
2530
gen_op_load_fpr_DT0(DFPREG(rs1));
1615
2531
gen_op_load_fpr_DT1(DFPREG(rs2));
2532
gen_clear_float_exceptions();
2533
gen_helper_fdmulq();
2534
gen_helper_check_ieee_exceptions();
1617
2535
gen_op_store_QT0_fpr(QFPREG(rd));
1623
gen_op_load_fpr_FT1(rs2);
1625
gen_op_store_FT0_fpr(rd);
2537
case 0xc4: /* fitos */
2538
gen_clear_float_exceptions();
2539
gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2540
gen_helper_check_ieee_exceptions();
2541
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2543
case 0xc6: /* fdtos */
1628
2544
gen_op_load_fpr_DT1(DFPREG(rs2));
1630
gen_op_store_FT0_fpr(rd);
2545
gen_clear_float_exceptions();
2546
gen_helper_fdtos(cpu_tmp32);
2547
gen_helper_check_ieee_exceptions();
2548
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
1632
2550
case 0xc7: /* fqtos */
1633
#if defined(CONFIG_USER_ONLY)
2551
CHECK_FPU_FEATURE(dc, FLOAT128);
1634
2552
gen_op_load_fpr_QT1(QFPREG(rs2));
1636
gen_op_store_FT0_fpr(rd);
2553
gen_clear_float_exceptions();
2554
gen_helper_fqtos(cpu_tmp32);
2555
gen_helper_check_ieee_exceptions();
2556
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
1642
gen_op_load_fpr_FT1(rs2);
2558
case 0xc8: /* fitod */
2559
gen_helper_fitod(cpu_fpr[rs2]);
1644
2560
gen_op_store_DT0_fpr(DFPREG(rd));
1647
gen_op_load_fpr_FT1(rs2);
2562
case 0xc9: /* fstod */
2563
gen_helper_fstod(cpu_fpr[rs2]);
1649
2564
gen_op_store_DT0_fpr(DFPREG(rd));
1651
2566
case 0xcb: /* fqtod */
1652
#if defined(CONFIG_USER_ONLY)
2567
CHECK_FPU_FEATURE(dc, FLOAT128);
1653
2568
gen_op_load_fpr_QT1(QFPREG(rs2));
2569
gen_clear_float_exceptions();
2571
gen_helper_check_ieee_exceptions();
1655
2572
gen_op_store_DT0_fpr(DFPREG(rd));
1660
2574
case 0xcc: /* fitoq */
1661
#if defined(CONFIG_USER_ONLY)
1662
gen_op_load_fpr_FT1(rs2);
2575
CHECK_FPU_FEATURE(dc, FLOAT128);
2576
gen_helper_fitoq(cpu_fpr[rs2]);
1664
2577
gen_op_store_QT0_fpr(QFPREG(rd));
1669
2579
case 0xcd: /* fstoq */
1670
#if defined(CONFIG_USER_ONLY)
1671
gen_op_load_fpr_FT1(rs2);
2580
CHECK_FPU_FEATURE(dc, FLOAT128);
2581
gen_helper_fstoq(cpu_fpr[rs2]);
1673
2582
gen_op_store_QT0_fpr(QFPREG(rd));
1678
2584
case 0xce: /* fdtoq */
1679
#if defined(CONFIG_USER_ONLY)
2585
CHECK_FPU_FEATURE(dc, FLOAT128);
1680
2586
gen_op_load_fpr_DT1(DFPREG(rs2));
1682
2588
gen_op_store_QT0_fpr(QFPREG(rd));
1688
gen_op_load_fpr_FT1(rs2);
1690
gen_op_store_FT0_fpr(rd);
2590
case 0xd1: /* fstoi */
2591
gen_clear_float_exceptions();
2592
gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2593
gen_helper_check_ieee_exceptions();
2594
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2596
case 0xd2: /* fdtoi */
1693
2597
gen_op_load_fpr_DT1(DFPREG(rs2));
1695
gen_op_store_FT0_fpr(rd);
2598
gen_clear_float_exceptions();
2599
gen_helper_fdtoi(cpu_tmp32);
2600
gen_helper_check_ieee_exceptions();
2601
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
1697
2603
case 0xd3: /* fqtoi */
1698
#if defined(CONFIG_USER_ONLY)
2604
CHECK_FPU_FEATURE(dc, FLOAT128);
1699
2605
gen_op_load_fpr_QT1(QFPREG(rs2));
1701
gen_op_store_FT0_fpr(rd);
2606
gen_clear_float_exceptions();
2607
gen_helper_fqtoi(cpu_tmp32);
2608
gen_helper_check_ieee_exceptions();
2609
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
1706
2611
#ifdef TARGET_SPARC64
1707
2612
case 0x2: /* V9 fmovd */
1708
gen_op_load_fpr_DT0(DFPREG(rs2));
1709
gen_op_store_DT0_fpr(DFPREG(rd));
2613
tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],
2614
cpu_fpr[DFPREG(rs2)]);
2615
tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2616
cpu_fpr[DFPREG(rs2) + 1]);
1711
2618
case 0x3: /* V9 fmovq */
1712
#if defined(CONFIG_USER_ONLY)
1713
gen_op_load_fpr_QT0(QFPREG(rs2));
1714
gen_op_store_QT0_fpr(QFPREG(rd));
2619
CHECK_FPU_FEATURE(dc, FLOAT128);
2620
tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],
2621
cpu_fpr[QFPREG(rs2)]);
2622
tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2623
cpu_fpr[QFPREG(rs2) + 1]);
2624
tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2625
cpu_fpr[QFPREG(rs2) + 2]);
2626
tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2627
cpu_fpr[QFPREG(rs2) + 3]);
1719
2629
case 0x6: /* V9 fnegd */
1720
2630
gen_op_load_fpr_DT1(DFPREG(rs2));
1722
2632
gen_op_store_DT0_fpr(DFPREG(rd));
1724
2634
case 0x7: /* V9 fnegq */
1725
#if defined(CONFIG_USER_ONLY)
2635
CHECK_FPU_FEATURE(dc, FLOAT128);
1726
2636
gen_op_load_fpr_QT1(QFPREG(rs2));
1728
2638
gen_op_store_QT0_fpr(QFPREG(rd));
1733
2640
case 0xa: /* V9 fabsd */
1734
2641
gen_op_load_fpr_DT1(DFPREG(rs2));
1736
2643
gen_op_store_DT0_fpr(DFPREG(rd));
1738
2645
case 0xb: /* V9 fabsq */
1739
#if defined(CONFIG_USER_ONLY)
2646
CHECK_FPU_FEATURE(dc, FLOAT128);
1740
2647
gen_op_load_fpr_QT1(QFPREG(rs2));
1742
2649
gen_op_store_QT0_fpr(QFPREG(rd));
1747
2651
case 0x81: /* V9 fstox */
1748
gen_op_load_fpr_FT1(rs2);
2652
gen_clear_float_exceptions();
2653
gen_helper_fstox(cpu_fpr[rs2]);
2654
gen_helper_check_ieee_exceptions();
1750
2655
gen_op_store_DT0_fpr(DFPREG(rd));
1752
2657
case 0x82: /* V9 fdtox */
1753
2658
gen_op_load_fpr_DT1(DFPREG(rs2));
2659
gen_clear_float_exceptions();
2661
gen_helper_check_ieee_exceptions();
1755
2662
gen_op_store_DT0_fpr(DFPREG(rd));
1757
2664
case 0x83: /* V9 fqtox */
1758
#if defined(CONFIG_USER_ONLY)
2665
CHECK_FPU_FEATURE(dc, FLOAT128);
1759
2666
gen_op_load_fpr_QT1(QFPREG(rs2));
2667
gen_clear_float_exceptions();
2669
gen_helper_check_ieee_exceptions();
1761
2670
gen_op_store_DT0_fpr(DFPREG(rd));
1766
2672
case 0x84: /* V9 fxtos */
1767
2673
gen_op_load_fpr_DT1(DFPREG(rs2));
1769
gen_op_store_FT0_fpr(rd);
2674
gen_clear_float_exceptions();
2675
gen_helper_fxtos(cpu_tmp32);
2676
gen_helper_check_ieee_exceptions();
2677
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
1771
2679
case 0x88: /* V9 fxtod */
1772
2680
gen_op_load_fpr_DT1(DFPREG(rs2));
2681
gen_clear_float_exceptions();
2683
gen_helper_check_ieee_exceptions();
1774
2684
gen_op_store_DT0_fpr(DFPREG(rd));
1776
2686
case 0x8c: /* V9 fxtoq */
1777
#if defined(CONFIG_USER_ONLY)
2687
CHECK_FPU_FEATURE(dc, FLOAT128);
1778
2688
gen_op_load_fpr_DT1(DFPREG(rs2));
2689
gen_clear_float_exceptions();
2691
gen_helper_check_ieee_exceptions();
1780
2692
gen_op_store_QT0_fpr(QFPREG(rd));
1787
2696
goto illegal_insn;
1798
2707
xop = GET_FIELD(insn, 18, 26);
1799
2708
#ifdef TARGET_SPARC64
1800
2709
if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2712
l1 = gen_new_label();
1801
2713
cond = GET_FIELD_SP(insn, 14, 17);
1802
gen_op_load_fpr_FT0(rd);
1803
gen_op_load_fpr_FT1(rs2);
1804
rs1 = GET_FIELD(insn, 13, 17);
1805
gen_movl_reg_T0(rs1);
1809
gen_op_store_FT0_fpr(rd);
2714
cpu_src1 = get_src1(insn, cpu_src1);
2715
tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2717
tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
1811
2720
} else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2723
l1 = gen_new_label();
1812
2724
cond = GET_FIELD_SP(insn, 14, 17);
1813
gen_op_load_fpr_DT0(DFPREG(rd));
1814
gen_op_load_fpr_DT1(DFPREG(rs2));
1816
rs1 = GET_FIELD(insn, 13, 17);
1817
gen_movl_reg_T0(rs1);
1820
gen_op_store_DT0_fpr(DFPREG(rd));
2725
cpu_src1 = get_src1(insn, cpu_src1);
2726
tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2728
tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2729
tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
1822
2732
} else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
1823
#if defined(CONFIG_USER_ONLY)
2735
CHECK_FPU_FEATURE(dc, FLOAT128);
2736
l1 = gen_new_label();
1824
2737
cond = GET_FIELD_SP(insn, 14, 17);
1825
gen_op_load_fpr_QT0(QFPREG(rd));
1826
gen_op_load_fpr_QT1(QFPREG(rs2));
1828
rs1 = GET_FIELD(insn, 13, 17);
1829
gen_movl_reg_T0(rs1);
1832
gen_op_store_QT0_fpr(QFPREG(rd));
2738
cpu_src1 = get_src1(insn, cpu_src1);
2739
tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2741
tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2742
tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2743
tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2744
tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
1840
2750
#ifdef TARGET_SPARC64
2751
#define FMOVSCC(fcc) \
2756
l1 = gen_new_label(); \
2757
r_cond = tcg_temp_new(); \
2758
cond = GET_FIELD_SP(insn, 14, 17); \
2759
gen_fcond(r_cond, fcc, cond); \
2760
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2762
tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2763
gen_set_label(l1); \
2764
tcg_temp_free(r_cond); \
2766
#define FMOVDCC(fcc) \
2771
l1 = gen_new_label(); \
2772
r_cond = tcg_temp_new(); \
2773
cond = GET_FIELD_SP(insn, 14, 17); \
2774
gen_fcond(r_cond, fcc, cond); \
2775
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2777
tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2778
cpu_fpr[DFPREG(rs2)]); \
2779
tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2780
cpu_fpr[DFPREG(rs2) + 1]); \
2781
gen_set_label(l1); \
2782
tcg_temp_free(r_cond); \
2784
#define FMOVQCC(fcc) \
2789
l1 = gen_new_label(); \
2790
r_cond = tcg_temp_new(); \
2791
cond = GET_FIELD_SP(insn, 14, 17); \
2792
gen_fcond(r_cond, fcc, cond); \
2793
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2795
tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2796
cpu_fpr[QFPREG(rs2)]); \
2797
tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2798
cpu_fpr[QFPREG(rs2) + 1]); \
2799
tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2800
cpu_fpr[QFPREG(rs2) + 2]); \
2801
tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2802
cpu_fpr[QFPREG(rs2) + 3]); \
2803
gen_set_label(l1); \
2804
tcg_temp_free(r_cond); \
1841
2806
case 0x001: /* V9 fmovscc %fcc0 */
1842
cond = GET_FIELD_SP(insn, 14, 17);
1843
gen_op_load_fpr_FT0(rd);
1844
gen_op_load_fpr_FT1(rs2);
1846
gen_fcond[0][cond]();
1848
gen_op_store_FT0_fpr(rd);
1850
2809
case 0x002: /* V9 fmovdcc %fcc0 */
1851
cond = GET_FIELD_SP(insn, 14, 17);
1852
gen_op_load_fpr_DT0(DFPREG(rd));
1853
gen_op_load_fpr_DT1(DFPREG(rs2));
1855
gen_fcond[0][cond]();
1857
gen_op_store_DT0_fpr(DFPREG(rd));
1859
2812
case 0x003: /* V9 fmovqcc %fcc0 */
1860
#if defined(CONFIG_USER_ONLY)
1861
cond = GET_FIELD_SP(insn, 14, 17);
1862
gen_op_load_fpr_QT0(QFPREG(rd));
1863
gen_op_load_fpr_QT1(QFPREG(rs2));
1865
gen_fcond[0][cond]();
1867
gen_op_store_QT0_fpr(QFPREG(rd));
2813
CHECK_FPU_FEATURE(dc, FLOAT128);
1872
2816
case 0x041: /* V9 fmovscc %fcc1 */
1873
cond = GET_FIELD_SP(insn, 14, 17);
1874
gen_op_load_fpr_FT0(rd);
1875
gen_op_load_fpr_FT1(rs2);
1877
gen_fcond[1][cond]();
1879
gen_op_store_FT0_fpr(rd);
1881
2819
case 0x042: /* V9 fmovdcc %fcc1 */
1882
cond = GET_FIELD_SP(insn, 14, 17);
1883
gen_op_load_fpr_DT0(DFPREG(rd));
1884
gen_op_load_fpr_DT1(DFPREG(rs2));
1886
gen_fcond[1][cond]();
1888
gen_op_store_DT0_fpr(DFPREG(rd));
1890
2822
case 0x043: /* V9 fmovqcc %fcc1 */
1891
#if defined(CONFIG_USER_ONLY)
1892
cond = GET_FIELD_SP(insn, 14, 17);
1893
gen_op_load_fpr_QT0(QFPREG(rd));
1894
gen_op_load_fpr_QT1(QFPREG(rs2));
1896
gen_fcond[1][cond]();
1898
gen_op_store_QT0_fpr(QFPREG(rd));
2823
CHECK_FPU_FEATURE(dc, FLOAT128);
1903
2826
case 0x081: /* V9 fmovscc %fcc2 */
1904
cond = GET_FIELD_SP(insn, 14, 17);
1905
gen_op_load_fpr_FT0(rd);
1906
gen_op_load_fpr_FT1(rs2);
1908
gen_fcond[2][cond]();
1910
gen_op_store_FT0_fpr(rd);
1912
2829
case 0x082: /* V9 fmovdcc %fcc2 */
1913
cond = GET_FIELD_SP(insn, 14, 17);
1914
gen_op_load_fpr_DT0(DFPREG(rd));
1915
gen_op_load_fpr_DT1(DFPREG(rs2));
1917
gen_fcond[2][cond]();
1919
gen_op_store_DT0_fpr(DFPREG(rd));
1921
2832
case 0x083: /* V9 fmovqcc %fcc2 */
1922
#if defined(CONFIG_USER_ONLY)
1923
cond = GET_FIELD_SP(insn, 14, 17);
1924
gen_op_load_fpr_QT0(rd);
1925
gen_op_load_fpr_QT1(rs2);
1927
gen_fcond[2][cond]();
1929
gen_op_store_QT0_fpr(rd);
2833
CHECK_FPU_FEATURE(dc, FLOAT128);
1934
2836
case 0x0c1: /* V9 fmovscc %fcc3 */
1935
cond = GET_FIELD_SP(insn, 14, 17);
1936
gen_op_load_fpr_FT0(rd);
1937
gen_op_load_fpr_FT1(rs2);
1939
gen_fcond[3][cond]();
1941
gen_op_store_FT0_fpr(rd);
1943
2839
case 0x0c2: /* V9 fmovdcc %fcc3 */
1944
cond = GET_FIELD_SP(insn, 14, 17);
1945
gen_op_load_fpr_DT0(DFPREG(rd));
1946
gen_op_load_fpr_DT1(DFPREG(rs2));
1948
gen_fcond[3][cond]();
1950
gen_op_store_DT0_fpr(DFPREG(rd));
1952
2842
case 0x0c3: /* V9 fmovqcc %fcc3 */
1953
#if defined(CONFIG_USER_ONLY)
1954
cond = GET_FIELD_SP(insn, 14, 17);
1955
gen_op_load_fpr_QT0(QFPREG(rd));
1956
gen_op_load_fpr_QT1(QFPREG(rs2));
1958
gen_fcond[3][cond]();
1960
gen_op_store_QT0_fpr(QFPREG(rd));
2843
CHECK_FPU_FEATURE(dc, FLOAT128);
2849
#define FMOVCC(size_FDQ, icc) \
2854
l1 = gen_new_label(); \
2855
r_cond = tcg_temp_new(); \
2856
cond = GET_FIELD_SP(insn, 14, 17); \
2857
gen_cond(r_cond, icc, cond); \
2858
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2860
glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2861
(glue(size_FDQ, FPREG(rs2))); \
2862
glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2863
(glue(size_FDQ, FPREG(rd))); \
2864
gen_set_label(l1); \
2865
tcg_temp_free(r_cond); \
2867
#define FMOVSCC(icc) \
2872
l1 = gen_new_label(); \
2873
r_cond = tcg_temp_new(); \
2874
cond = GET_FIELD_SP(insn, 14, 17); \
2875
gen_cond(r_cond, icc, cond); \
2876
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2878
tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2879
gen_set_label(l1); \
2880
tcg_temp_free(r_cond); \
2882
#define FMOVDCC(icc) \
2887
l1 = gen_new_label(); \
2888
r_cond = tcg_temp_new(); \
2889
cond = GET_FIELD_SP(insn, 14, 17); \
2890
gen_cond(r_cond, icc, cond); \
2891
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2893
tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2894
cpu_fpr[DFPREG(rs2)]); \
2895
tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2896
cpu_fpr[DFPREG(rs2) + 1]); \
2897
gen_set_label(l1); \
2898
tcg_temp_free(r_cond); \
2900
#define FMOVQCC(icc) \
2905
l1 = gen_new_label(); \
2906
r_cond = tcg_temp_new(); \
2907
cond = GET_FIELD_SP(insn, 14, 17); \
2908
gen_cond(r_cond, icc, cond); \
2909
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2911
tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2912
cpu_fpr[QFPREG(rs2)]); \
2913
tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2914
cpu_fpr[QFPREG(rs2) + 1]); \
2915
tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2916
cpu_fpr[QFPREG(rs2) + 2]); \
2917
tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2918
cpu_fpr[QFPREG(rs2) + 3]); \
2919
gen_set_label(l1); \
2920
tcg_temp_free(r_cond); \
1965
2923
case 0x101: /* V9 fmovscc %icc */
1966
cond = GET_FIELD_SP(insn, 14, 17);
1967
gen_op_load_fpr_FT0(rd);
1968
gen_op_load_fpr_FT1(rs2);
1970
gen_cond[0][cond]();
1972
gen_op_store_FT0_fpr(rd);
1974
2926
case 0x102: /* V9 fmovdcc %icc */
1975
cond = GET_FIELD_SP(insn, 14, 17);
1976
gen_op_load_fpr_DT0(DFPREG(rd));
1977
gen_op_load_fpr_DT1(DFPREG(rs2));
1979
gen_cond[0][cond]();
1981
gen_op_store_DT0_fpr(DFPREG(rd));
1983
2928
case 0x103: /* V9 fmovqcc %icc */
1984
#if defined(CONFIG_USER_ONLY)
1985
cond = GET_FIELD_SP(insn, 14, 17);
1986
gen_op_load_fpr_QT0(rd);
1987
gen_op_load_fpr_QT1(rs2);
1989
gen_cond[0][cond]();
1991
gen_op_store_QT0_fpr(rd);
2929
CHECK_FPU_FEATURE(dc, FLOAT128);
1996
2932
case 0x181: /* V9 fmovscc %xcc */
1997
cond = GET_FIELD_SP(insn, 14, 17);
1998
gen_op_load_fpr_FT0(rd);
1999
gen_op_load_fpr_FT1(rs2);
2001
gen_cond[1][cond]();
2003
gen_op_store_FT0_fpr(rd);
2005
2935
case 0x182: /* V9 fmovdcc %xcc */
2006
cond = GET_FIELD_SP(insn, 14, 17);
2007
gen_op_load_fpr_DT0(DFPREG(rd));
2008
gen_op_load_fpr_DT1(DFPREG(rs2));
2010
gen_cond[1][cond]();
2012
gen_op_store_DT0_fpr(DFPREG(rd));
2014
2938
case 0x183: /* V9 fmovqcc %xcc */
2015
#if defined(CONFIG_USER_ONLY)
2016
cond = GET_FIELD_SP(insn, 14, 17);
2017
gen_op_load_fpr_QT0(rd);
2018
gen_op_load_fpr_QT1(rs2);
2020
gen_cond[1][cond]();
2022
gen_op_store_QT0_fpr(rd);
2939
CHECK_FPU_FEATURE(dc, FLOAT128);
2028
2946
case 0x51: /* fcmps, V9 %fcc */
2029
gen_op_load_fpr_FT0(rs1);
2030
gen_op_load_fpr_FT1(rs2);
2031
#ifdef TARGET_SPARC64
2032
gen_fcmps[rd & 3]();
2947
gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2037
2949
case 0x52: /* fcmpd, V9 %fcc */
2038
2950
gen_op_load_fpr_DT0(DFPREG(rs1));
2039
2951
gen_op_load_fpr_DT1(DFPREG(rs2));
2040
#ifdef TARGET_SPARC64
2041
gen_fcmpd[rd & 3]();
2952
gen_op_fcmpd(rd & 3);
2046
2954
case 0x53: /* fcmpq, V9 %fcc */
2047
#if defined(CONFIG_USER_ONLY)
2955
CHECK_FPU_FEATURE(dc, FLOAT128);
2048
2956
gen_op_load_fpr_QT0(QFPREG(rs1));
2049
2957
gen_op_load_fpr_QT1(QFPREG(rs2));
2050
#ifdef TARGET_SPARC64
2051
gen_fcmpq[rd & 3]();
2958
gen_op_fcmpq(rd & 3);
2056
#else /* !defined(CONFIG_USER_ONLY) */
2059
2960
case 0x55: /* fcmpes, V9 %fcc */
2060
gen_op_load_fpr_FT0(rs1);
2061
gen_op_load_fpr_FT1(rs2);
2062
#ifdef TARGET_SPARC64
2063
gen_fcmpes[rd & 3]();
2961
gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2068
2963
case 0x56: /* fcmped, V9 %fcc */
2069
2964
gen_op_load_fpr_DT0(DFPREG(rs1));
2070
2965
gen_op_load_fpr_DT1(DFPREG(rs2));
2071
#ifdef TARGET_SPARC64
2072
gen_fcmped[rd & 3]();
2966
gen_op_fcmped(rd & 3);
2077
2968
case 0x57: /* fcmpeq, V9 %fcc */
2078
#if defined(CONFIG_USER_ONLY)
2969
CHECK_FPU_FEATURE(dc, FLOAT128);
2079
2970
gen_op_load_fpr_QT0(QFPREG(rs1));
2080
2971
gen_op_load_fpr_QT1(QFPREG(rs2));
2081
#ifdef TARGET_SPARC64
2082
gen_fcmpeq[rd & 3]();
2972
gen_op_fcmpeq(rd & 3);
2087
#else/* !defined(CONFIG_USER_ONLY) */
2091
2975
goto illegal_insn;
2094
2977
} else if (xop == 0x2) {
2095
2978
// clr/mov shortcut
2097
2980
rs1 = GET_FIELD(insn, 13, 17);
2098
2981
if (rs1 == 0) {
2099
// or %g0, x, y -> mov T1, x; mov y, T1
2982
// or %g0, x, y -> mov T0, x; mov y, T0
2100
2983
if (IS_IMM) { /* immediate */
2101
2986
rs2 = GET_FIELDs(insn, 19, 31);
2102
gen_movl_simm_T1(rs2);
2987
r_const = tcg_const_tl((int)rs2);
2988
gen_movl_TN_reg(rd, r_const);
2989
tcg_temp_free(r_const);
2103
2990
} else { /* register */
2104
2991
rs2 = GET_FIELD(insn, 27, 31);
2105
gen_movl_reg_T1(rs2);
2992
gen_movl_reg_TN(rs2, cpu_dst);
2993
gen_movl_TN_reg(rd, cpu_dst);
2107
gen_movl_T1_reg(rd);
2109
gen_movl_reg_T0(rs1);
2996
cpu_src1 = get_src1(insn, cpu_src1);
2110
2997
if (IS_IMM) { /* immediate */
2111
// or x, #0, y -> mov T1, x; mov y, T1
2112
2998
rs2 = GET_FIELDs(insn, 19, 31);
2114
gen_movl_simm_T1(rs2);
2999
tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
3000
gen_movl_TN_reg(rd, cpu_dst);
2117
3001
} else { /* register */
2118
3002
// or x, %g0, y -> mov T1, x; mov y, T1
2119
3003
rs2 = GET_FIELD(insn, 27, 31);
2120
3004
if (rs2 != 0) {
2121
gen_movl_reg_T1(rs2);
3005
gen_movl_reg_TN(rs2, cpu_src2);
3006
tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3007
gen_movl_TN_reg(rd, cpu_dst);
3009
gen_movl_TN_reg(rd, cpu_src1);
2125
gen_movl_T0_reg(rd);
2128
3012
#ifdef TARGET_SPARC64
2129
3013
} else if (xop == 0x25) { /* sll, V9 sllx */
2130
rs1 = GET_FIELD(insn, 13, 17);
2131
gen_movl_reg_T0(rs1);
3014
cpu_src1 = get_src1(insn, cpu_src1);
2132
3015
if (IS_IMM) { /* immediate */
2133
3016
rs2 = GET_FIELDs(insn, 20, 31);
2134
gen_movl_simm_T1(rs2);
3017
if (insn & (1 << 12)) {
3018
tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3020
tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x1f);
2135
3022
} else { /* register */
2136
3023
rs2 = GET_FIELD(insn, 27, 31);
2137
gen_movl_reg_T1(rs2);
3024
gen_movl_reg_TN(rs2, cpu_src2);
3025
if (insn & (1 << 12)) {
3026
tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3028
tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3030
tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2139
if (insn & (1 << 12))
2143
gen_movl_T0_reg(rd);
3032
gen_movl_TN_reg(rd, cpu_dst);
2144
3033
} else if (xop == 0x26) { /* srl, V9 srlx */
2145
rs1 = GET_FIELD(insn, 13, 17);
2146
gen_movl_reg_T0(rs1);
3034
cpu_src1 = get_src1(insn, cpu_src1);
2147
3035
if (IS_IMM) { /* immediate */
2148
3036
rs2 = GET_FIELDs(insn, 20, 31);
2149
gen_movl_simm_T1(rs2);
3037
if (insn & (1 << 12)) {
3038
tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3040
tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3041
tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
2150
3043
} else { /* register */
2151
3044
rs2 = GET_FIELD(insn, 27, 31);
2152
gen_movl_reg_T1(rs2);
3045
gen_movl_reg_TN(rs2, cpu_src2);
3046
if (insn & (1 << 12)) {
3047
tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3048
tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3050
tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3051
tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3052
tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2154
if (insn & (1 << 12))
2158
gen_movl_T0_reg(rd);
3055
gen_movl_TN_reg(rd, cpu_dst);
2159
3056
} else if (xop == 0x27) { /* sra, V9 srax */
2160
rs1 = GET_FIELD(insn, 13, 17);
2161
gen_movl_reg_T0(rs1);
3057
cpu_src1 = get_src1(insn, cpu_src1);
2162
3058
if (IS_IMM) { /* immediate */
2163
3059
rs2 = GET_FIELDs(insn, 20, 31);
2164
gen_movl_simm_T1(rs2);
3060
if (insn & (1 << 12)) {
3061
tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3063
tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3064
tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3065
tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
2165
3067
} else { /* register */
2166
3068
rs2 = GET_FIELD(insn, 27, 31);
2167
gen_movl_reg_T1(rs2);
3069
gen_movl_reg_TN(rs2, cpu_src2);
3070
if (insn & (1 << 12)) {
3071
tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3072
tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3074
tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3075
tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3076
tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3077
tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2169
if (insn & (1 << 12))
2173
gen_movl_T0_reg(rd);
3080
gen_movl_TN_reg(rd, cpu_dst);
2175
3082
} else if (xop < 0x36) {
2176
rs1 = GET_FIELD(insn, 13, 17);
2177
gen_movl_reg_T0(rs1);
2178
if (IS_IMM) { /* immediate */
2179
rs2 = GET_FIELDs(insn, 19, 31);
2180
gen_movl_simm_T1(rs2);
2181
} else { /* register */
2182
rs2 = GET_FIELD(insn, 27, 31);
2183
gen_movl_reg_T1(rs2);
3083
cpu_src1 = get_src1(insn, cpu_src1);
3084
cpu_src2 = get_src2(insn, cpu_src2);
2185
3085
if (xop < 0x20) {
2186
3086
switch (xop & ~0x10) {
2188
3088
if (xop & 0x10)
2189
gen_op_add_T1_T0_cc();
3089
gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3091
tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3094
tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
2195
3095
if (xop & 0x10)
2196
gen_op_logic_T0_cc();
3096
gen_op_logic_cc(cpu_dst);
3099
tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2200
3100
if (xop & 0x10)
2201
gen_op_logic_T0_cc();
3101
gen_op_logic_cc(cpu_dst);
3104
tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
2205
3105
if (xop & 0x10)
2206
gen_op_logic_T0_cc();
3106
gen_op_logic_cc(cpu_dst);
2209
3109
if (xop & 0x10)
2210
gen_op_sub_T1_T0_cc();
3110
gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3112
tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
2215
gen_op_andn_T1_T0();
3115
tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
2216
3116
if (xop & 0x10)
2217
gen_op_logic_T0_cc();
3117
gen_op_logic_cc(cpu_dst);
3120
tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
2221
3121
if (xop & 0x10)
2222
gen_op_logic_T0_cc();
3122
gen_op_logic_cc(cpu_dst);
2225
gen_op_xnor_T1_T0();
3125
tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3126
tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
2226
3127
if (xop & 0x10)
2227
gen_op_logic_T0_cc();
3128
gen_op_logic_cc(cpu_dst);
2230
3131
if (xop & 0x10)
2231
gen_op_addx_T1_T0_cc();
2233
gen_op_addx_T1_T0();
3132
gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3134
gen_mov_reg_C(cpu_tmp0, cpu_psr);
3135
tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3136
tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2235
3139
#ifdef TARGET_SPARC64
2236
3140
case 0x9: /* V9 mulx */
2237
gen_op_mulx_T1_T0();
3141
tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
2241
gen_op_umul_T1_T0();
3145
CHECK_IU_FEATURE(dc, MUL);
3146
gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
2242
3147
if (xop & 0x10)
2243
gen_op_logic_T0_cc();
3148
gen_op_logic_cc(cpu_dst);
2246
gen_op_smul_T1_T0();
3151
CHECK_IU_FEATURE(dc, MUL);
3152
gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
2247
3153
if (xop & 0x10)
2248
gen_op_logic_T0_cc();
3154
gen_op_logic_cc(cpu_dst);
2251
3157
if (xop & 0x10)
2252
gen_op_subx_T1_T0_cc();
2254
gen_op_subx_T1_T0();
3158
gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3160
gen_mov_reg_C(cpu_tmp0, cpu_psr);
3161
tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3162
tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
2256
3165
#ifdef TARGET_SPARC64
2257
3166
case 0xd: /* V9 udivx */
2258
gen_op_udivx_T1_T0();
3167
tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3168
tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3169
gen_trap_ifdivzero_tl(cpu_cc_src2);
3170
tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
2262
gen_op_udiv_T1_T0();
3174
CHECK_IU_FEATURE(dc, DIV);
3175
gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
2263
3176
if (xop & 0x10)
3177
gen_op_div_cc(cpu_dst);
2267
gen_op_sdiv_T1_T0();
3180
CHECK_IU_FEATURE(dc, DIV);
3181
gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
2268
3182
if (xop & 0x10)
3183
gen_op_div_cc(cpu_dst);
2272
3186
goto illegal_insn;
2274
gen_movl_T0_reg(rd);
3188
gen_movl_TN_reg(rd, cpu_dst);
2277
3191
case 0x20: /* taddcc */
2278
gen_op_tadd_T1_T0_cc();
2279
gen_movl_T0_reg(rd);
3192
gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3193
gen_movl_TN_reg(rd, cpu_dst);
2281
3195
case 0x21: /* tsubcc */
2282
gen_op_tsub_T1_T0_cc();
2283
gen_movl_T0_reg(rd);
3196
gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3197
gen_movl_TN_reg(rd, cpu_dst);
2285
3199
case 0x22: /* taddcctv */
2287
gen_op_tadd_T1_T0_ccTV();
2288
gen_movl_T0_reg(rd);
3200
save_state(dc, cpu_cond);
3201
gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3202
gen_movl_TN_reg(rd, cpu_dst);
2290
3204
case 0x23: /* tsubcctv */
2292
gen_op_tsub_T1_T0_ccTV();
2293
gen_movl_T0_reg(rd);
3205
save_state(dc, cpu_cond);
3206
gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3207
gen_movl_TN_reg(rd, cpu_dst);
2295
3209
case 0x24: /* mulscc */
2296
gen_op_mulscc_T1_T0();
2297
gen_movl_T0_reg(rd);
3210
gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3211
gen_movl_TN_reg(rd, cpu_dst);
2299
3213
#ifndef TARGET_SPARC64
2300
3214
case 0x25: /* sll */
2302
gen_movl_T0_reg(rd);
3215
if (IS_IMM) { /* immediate */
3216
rs2 = GET_FIELDs(insn, 20, 31);
3217
tcg_gen_shli_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3218
} else { /* register */
3219
tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3220
tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3222
gen_movl_TN_reg(rd, cpu_dst);
2304
3224
case 0x26: /* srl */
2306
gen_movl_T0_reg(rd);
3225
if (IS_IMM) { /* immediate */
3226
rs2 = GET_FIELDs(insn, 20, 31);
3227
tcg_gen_shri_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3228
} else { /* register */
3229
tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3230
tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3232
gen_movl_TN_reg(rd, cpu_dst);
2308
3234
case 0x27: /* sra */
2310
gen_movl_T0_reg(rd);
3235
if (IS_IMM) { /* immediate */
3236
rs2 = GET_FIELDs(insn, 20, 31);
3237
tcg_gen_sari_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3238
} else { /* register */
3239
tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3240
tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3242
gen_movl_TN_reg(rd, cpu_dst);
2316
3248
case 0: /* wry */
2318
gen_op_movtl_env_T0(offsetof(CPUSPARCState, y));
3249
tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3250
tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
2320
3252
#ifndef TARGET_SPARC64
2321
3253
case 0x01 ... 0x0f: /* undefined in the
2763
3847
goto illegal_insn;
2764
3848
case 0x048: /* VIS I faligndata */
3849
CHECK_FPU_FEATURE(dc, VIS1);
2765
3850
gen_op_load_fpr_DT0(DFPREG(rs1));
2766
3851
gen_op_load_fpr_DT1(DFPREG(rs2));
2767
gen_op_faligndata();
3852
gen_helper_faligndata();
2768
3853
gen_op_store_DT0_fpr(DFPREG(rd));
2770
3855
case 0x04b: /* VIS I fpmerge */
3856
CHECK_FPU_FEATURE(dc, VIS1);
2771
3857
gen_op_load_fpr_DT0(DFPREG(rs1));
2772
3858
gen_op_load_fpr_DT1(DFPREG(rs2));
3859
gen_helper_fpmerge();
2774
3860
gen_op_store_DT0_fpr(DFPREG(rd));
2776
3862
case 0x04c: /* VIS II bshuffle */
2778
3864
goto illegal_insn;
2779
3865
case 0x04d: /* VIS I fexpand */
3866
CHECK_FPU_FEATURE(dc, VIS1);
2780
3867
gen_op_load_fpr_DT0(DFPREG(rs1));
2781
3868
gen_op_load_fpr_DT1(DFPREG(rs2));
3869
gen_helper_fexpand();
2783
3870
gen_op_store_DT0_fpr(DFPREG(rd));
2785
3872
case 0x050: /* VIS I fpadd16 */
3873
CHECK_FPU_FEATURE(dc, VIS1);
2786
3874
gen_op_load_fpr_DT0(DFPREG(rs1));
2787
3875
gen_op_load_fpr_DT1(DFPREG(rs2));
3876
gen_helper_fpadd16();
2789
3877
gen_op_store_DT0_fpr(DFPREG(rd));
2791
3879
case 0x051: /* VIS I fpadd16s */
2792
gen_op_load_fpr_FT0(rs1);
2793
gen_op_load_fpr_FT1(rs2);
2795
gen_op_store_FT0_fpr(rd);
3880
CHECK_FPU_FEATURE(dc, VIS1);
3881
gen_helper_fpadd16s(cpu_fpr[rd],
3882
cpu_fpr[rs1], cpu_fpr[rs2]);
2797
3884
case 0x052: /* VIS I fpadd32 */
3885
CHECK_FPU_FEATURE(dc, VIS1);
2798
3886
gen_op_load_fpr_DT0(DFPREG(rs1));
2799
3887
gen_op_load_fpr_DT1(DFPREG(rs2));
3888
gen_helper_fpadd32();
2801
3889
gen_op_store_DT0_fpr(DFPREG(rd));
2803
3891
case 0x053: /* VIS I fpadd32s */
2804
gen_op_load_fpr_FT0(rs1);
2805
gen_op_load_fpr_FT1(rs2);
2807
gen_op_store_FT0_fpr(rd);
3892
CHECK_FPU_FEATURE(dc, VIS1);
3893
gen_helper_fpadd32s(cpu_fpr[rd],
3894
cpu_fpr[rs1], cpu_fpr[rs2]);
2809
3896
case 0x054: /* VIS I fpsub16 */
3897
CHECK_FPU_FEATURE(dc, VIS1);
2810
3898
gen_op_load_fpr_DT0(DFPREG(rs1));
2811
3899
gen_op_load_fpr_DT1(DFPREG(rs2));
3900
gen_helper_fpsub16();
2813
3901
gen_op_store_DT0_fpr(DFPREG(rd));
2815
3903
case 0x055: /* VIS I fpsub16s */
2816
gen_op_load_fpr_FT0(rs1);
2817
gen_op_load_fpr_FT1(rs2);
2819
gen_op_store_FT0_fpr(rd);
3904
CHECK_FPU_FEATURE(dc, VIS1);
3905
gen_helper_fpsub16s(cpu_fpr[rd],
3906
cpu_fpr[rs1], cpu_fpr[rs2]);
2821
3908
case 0x056: /* VIS I fpsub32 */
3909
CHECK_FPU_FEATURE(dc, VIS1);
2822
3910
gen_op_load_fpr_DT0(DFPREG(rs1));
2823
3911
gen_op_load_fpr_DT1(DFPREG(rs2));
3912
gen_helper_fpsub32();
2825
3913
gen_op_store_DT0_fpr(DFPREG(rd));
2827
3915
case 0x057: /* VIS I fpsub32s */
2828
gen_op_load_fpr_FT0(rs1);
2829
gen_op_load_fpr_FT1(rs2);
2831
gen_op_store_FT0_fpr(rd);
3916
CHECK_FPU_FEATURE(dc, VIS1);
3917
gen_helper_fpsub32s(cpu_fpr[rd],
3918
cpu_fpr[rs1], cpu_fpr[rs2]);
2833
3920
case 0x060: /* VIS I fzero */
2834
gen_op_movl_DT0_0();
2835
gen_op_store_DT0_fpr(DFPREG(rd));
3921
CHECK_FPU_FEATURE(dc, VIS1);
3922
tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3923
tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
2837
3925
case 0x061: /* VIS I fzeros */
2838
gen_op_movl_FT0_0();
2839
gen_op_store_FT0_fpr(rd);
3926
CHECK_FPU_FEATURE(dc, VIS1);
3927
tcg_gen_movi_i32(cpu_fpr[rd], 0);
2841
3929
case 0x062: /* VIS I fnor */
2842
gen_op_load_fpr_DT0(DFPREG(rs1));
2843
gen_op_load_fpr_DT1(DFPREG(rs2));
2845
gen_op_store_DT0_fpr(DFPREG(rd));
3930
CHECK_FPU_FEATURE(dc, VIS1);
3931
tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3932
cpu_fpr[DFPREG(rs2)]);
3933
tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3934
cpu_fpr[DFPREG(rs2) + 1]);
2847
3936
case 0x063: /* VIS I fnors */
2848
gen_op_load_fpr_FT0(rs1);
2849
gen_op_load_fpr_FT1(rs2);
2851
gen_op_store_FT0_fpr(rd);
3937
CHECK_FPU_FEATURE(dc, VIS1);
3938
tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2853
3940
case 0x064: /* VIS I fandnot2 */
2854
gen_op_load_fpr_DT1(DFPREG(rs1));
2855
gen_op_load_fpr_DT0(DFPREG(rs2));
2857
gen_op_store_DT0_fpr(DFPREG(rd));
3941
CHECK_FPU_FEATURE(dc, VIS1);
3942
tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3943
cpu_fpr[DFPREG(rs2)]);
3944
tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3945
cpu_fpr[DFPREG(rs1) + 1],
3946
cpu_fpr[DFPREG(rs2) + 1]);
2859
3948
case 0x065: /* VIS I fandnot2s */
2860
gen_op_load_fpr_FT1(rs1);
2861
gen_op_load_fpr_FT0(rs2);
2863
gen_op_store_FT0_fpr(rd);
3949
CHECK_FPU_FEATURE(dc, VIS1);
3950
tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
2865
3952
case 0x066: /* VIS I fnot2 */
2866
gen_op_load_fpr_DT1(DFPREG(rs2));
2868
gen_op_store_DT0_fpr(DFPREG(rd));
3953
CHECK_FPU_FEATURE(dc, VIS1);
3954
tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3955
tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3956
cpu_fpr[DFPREG(rs2) + 1]);
2870
3958
case 0x067: /* VIS I fnot2s */
2871
gen_op_load_fpr_FT1(rs2);
2873
gen_op_store_FT0_fpr(rd);
3959
CHECK_FPU_FEATURE(dc, VIS1);
3960
tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2875
3962
case 0x068: /* VIS I fandnot1 */
2876
gen_op_load_fpr_DT0(DFPREG(rs1));
2877
gen_op_load_fpr_DT1(DFPREG(rs2));
2879
gen_op_store_DT0_fpr(DFPREG(rd));
3963
CHECK_FPU_FEATURE(dc, VIS1);
3964
tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3965
cpu_fpr[DFPREG(rs1)]);
3966
tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3967
cpu_fpr[DFPREG(rs2) + 1],
3968
cpu_fpr[DFPREG(rs1) + 1]);
2881
3970
case 0x069: /* VIS I fandnot1s */
2882
gen_op_load_fpr_FT0(rs1);
2883
gen_op_load_fpr_FT1(rs2);
2885
gen_op_store_FT0_fpr(rd);
3971
CHECK_FPU_FEATURE(dc, VIS1);
3972
tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
2887
3974
case 0x06a: /* VIS I fnot1 */
2888
gen_op_load_fpr_DT1(DFPREG(rs1));
2890
gen_op_store_DT0_fpr(DFPREG(rd));
3975
CHECK_FPU_FEATURE(dc, VIS1);
3976
tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3977
tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3978
cpu_fpr[DFPREG(rs1) + 1]);
2892
3980
case 0x06b: /* VIS I fnot1s */
2893
gen_op_load_fpr_FT1(rs1);
2895
gen_op_store_FT0_fpr(rd);
3981
CHECK_FPU_FEATURE(dc, VIS1);
3982
tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
2897
3984
case 0x06c: /* VIS I fxor */
2898
gen_op_load_fpr_DT0(DFPREG(rs1));
2899
gen_op_load_fpr_DT1(DFPREG(rs2));
2901
gen_op_store_DT0_fpr(DFPREG(rd));
3985
CHECK_FPU_FEATURE(dc, VIS1);
3986
tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3987
cpu_fpr[DFPREG(rs2)]);
3988
tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3989
cpu_fpr[DFPREG(rs1) + 1],
3990
cpu_fpr[DFPREG(rs2) + 1]);
2903
3992
case 0x06d: /* VIS I fxors */
2904
gen_op_load_fpr_FT0(rs1);
2905
gen_op_load_fpr_FT1(rs2);
2907
gen_op_store_FT0_fpr(rd);
3993
CHECK_FPU_FEATURE(dc, VIS1);
3994
tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
2909
3996
case 0x06e: /* VIS I fnand */
2910
gen_op_load_fpr_DT0(DFPREG(rs1));
2911
gen_op_load_fpr_DT1(DFPREG(rs2));
2913
gen_op_store_DT0_fpr(DFPREG(rd));
3997
CHECK_FPU_FEATURE(dc, VIS1);
3998
tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3999
cpu_fpr[DFPREG(rs2)]);
4000
tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4001
cpu_fpr[DFPREG(rs2) + 1]);
2915
4003
case 0x06f: /* VIS I fnands */
2916
gen_op_load_fpr_FT0(rs1);
2917
gen_op_load_fpr_FT1(rs2);
2919
gen_op_store_FT0_fpr(rd);
4004
CHECK_FPU_FEATURE(dc, VIS1);
4005
tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2921
4007
case 0x070: /* VIS I fand */
2922
gen_op_load_fpr_DT0(DFPREG(rs1));
2923
gen_op_load_fpr_DT1(DFPREG(rs2));
2925
gen_op_store_DT0_fpr(DFPREG(rd));
4008
CHECK_FPU_FEATURE(dc, VIS1);
4009
tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4010
cpu_fpr[DFPREG(rs2)]);
4011
tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4012
cpu_fpr[DFPREG(rs1) + 1],
4013
cpu_fpr[DFPREG(rs2) + 1]);
2927
4015
case 0x071: /* VIS I fands */
2928
gen_op_load_fpr_FT0(rs1);
2929
gen_op_load_fpr_FT1(rs2);
2931
gen_op_store_FT0_fpr(rd);
4016
CHECK_FPU_FEATURE(dc, VIS1);
4017
tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
2933
4019
case 0x072: /* VIS I fxnor */
2934
gen_op_load_fpr_DT0(DFPREG(rs1));
2935
gen_op_load_fpr_DT1(DFPREG(rs2));
2937
gen_op_store_DT0_fpr(DFPREG(rd));
4020
CHECK_FPU_FEATURE(dc, VIS1);
4021
tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4022
tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4023
cpu_fpr[DFPREG(rs1)]);
4024
tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4025
tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4026
cpu_fpr[DFPREG(rs1) + 1]);
2939
4028
case 0x073: /* VIS I fxnors */
2940
gen_op_load_fpr_FT0(rs1);
2941
gen_op_load_fpr_FT1(rs2);
2943
gen_op_store_FT0_fpr(rd);
4029
CHECK_FPU_FEATURE(dc, VIS1);
4030
tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4031
tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
2945
4033
case 0x074: /* VIS I fsrc1 */
2946
gen_op_load_fpr_DT0(DFPREG(rs1));
2947
gen_op_store_DT0_fpr(DFPREG(rd));
4034
CHECK_FPU_FEATURE(dc, VIS1);
4035
tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4036
tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4037
cpu_fpr[DFPREG(rs1) + 1]);
2949
4039
case 0x075: /* VIS I fsrc1s */
2950
gen_op_load_fpr_FT0(rs1);
2951
gen_op_store_FT0_fpr(rd);
4040
CHECK_FPU_FEATURE(dc, VIS1);
4041
tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
2953
4043
case 0x076: /* VIS I fornot2 */
2954
gen_op_load_fpr_DT1(DFPREG(rs1));
2955
gen_op_load_fpr_DT0(DFPREG(rs2));
2957
gen_op_store_DT0_fpr(DFPREG(rd));
4044
CHECK_FPU_FEATURE(dc, VIS1);
4045
tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4046
cpu_fpr[DFPREG(rs2)]);
4047
tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4048
cpu_fpr[DFPREG(rs1) + 1],
4049
cpu_fpr[DFPREG(rs2) + 1]);
2959
4051
case 0x077: /* VIS I fornot2s */
2960
gen_op_load_fpr_FT1(rs1);
2961
gen_op_load_fpr_FT0(rs2);
2963
gen_op_store_FT0_fpr(rd);
4052
CHECK_FPU_FEATURE(dc, VIS1);
4053
tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
2965
4055
case 0x078: /* VIS I fsrc2 */
4056
CHECK_FPU_FEATURE(dc, VIS1);
2966
4057
gen_op_load_fpr_DT0(DFPREG(rs2));
2967
4058
gen_op_store_DT0_fpr(DFPREG(rd));
2969
4060
case 0x079: /* VIS I fsrc2s */
2970
gen_op_load_fpr_FT0(rs2);
2971
gen_op_store_FT0_fpr(rd);
4061
CHECK_FPU_FEATURE(dc, VIS1);
4062
tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2973
4064
case 0x07a: /* VIS I fornot1 */
2974
gen_op_load_fpr_DT0(DFPREG(rs1));
2975
gen_op_load_fpr_DT1(DFPREG(rs2));
2977
gen_op_store_DT0_fpr(DFPREG(rd));
4065
CHECK_FPU_FEATURE(dc, VIS1);
4066
tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4067
cpu_fpr[DFPREG(rs1)]);
4068
tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4069
cpu_fpr[DFPREG(rs2) + 1],
4070
cpu_fpr[DFPREG(rs1) + 1]);
2979
4072
case 0x07b: /* VIS I fornot1s */
2980
gen_op_load_fpr_FT0(rs1);
2981
gen_op_load_fpr_FT1(rs2);
2983
gen_op_store_FT0_fpr(rd);
4073
CHECK_FPU_FEATURE(dc, VIS1);
4074
tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
2985
4076
case 0x07c: /* VIS I for */
2986
gen_op_load_fpr_DT0(DFPREG(rs1));
2987
gen_op_load_fpr_DT1(DFPREG(rs2));
2989
gen_op_store_DT0_fpr(DFPREG(rd));
4077
CHECK_FPU_FEATURE(dc, VIS1);
4078
tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4079
cpu_fpr[DFPREG(rs2)]);
4080
tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4081
cpu_fpr[DFPREG(rs1) + 1],
4082
cpu_fpr[DFPREG(rs2) + 1]);
2991
4084
case 0x07d: /* VIS I fors */
2992
gen_op_load_fpr_FT0(rs1);
2993
gen_op_load_fpr_FT1(rs2);
2995
gen_op_store_FT0_fpr(rd);
4085
CHECK_FPU_FEATURE(dc, VIS1);
4086
tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
2997
4088
case 0x07e: /* VIS I fone */
2998
gen_op_movl_DT0_1();
2999
gen_op_store_DT0_fpr(DFPREG(rd));
4089
CHECK_FPU_FEATURE(dc, VIS1);
4090
tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4091
tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
3001
4093
case 0x07f: /* VIS I fones */
3002
gen_op_movl_FT0_1();
3003
gen_op_store_FT0_fpr(rd);
4094
CHECK_FPU_FEATURE(dc, VIS1);
4095
tcg_gen_movi_i32(cpu_fpr[rd], -1);
3005
4097
case 0x080: /* VIS I shutdown */
3006
4098
case 0x081: /* VIS II siam */
3319
4410
#ifdef TARGET_SPARC64
3320
4411
case 0x08: /* V9 ldsw */
3321
gen_op_check_align_T0_3();
4412
gen_address_mask(dc, cpu_addr);
4413
tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
3324
4415
case 0x0b: /* V9 ldx */
3325
gen_op_check_align_T0_7();
4416
gen_address_mask(dc, cpu_addr);
4417
tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
3328
4419
case 0x18: /* V9 ldswa */
3329
gen_op_check_align_T0_3();
3330
gen_ld_asi(insn, 4, 1);
4420
save_state(dc, cpu_cond);
4421
gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
3332
4423
case 0x1b: /* V9 ldxa */
3333
gen_op_check_align_T0_7();
3334
gen_ld_asi(insn, 8, 0);
4424
save_state(dc, cpu_cond);
4425
gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
3336
4427
case 0x2d: /* V9 prefetch, no effect */
3337
4428
goto skip_move;
3338
4429
case 0x30: /* V9 ldfa */
3339
gen_op_check_align_T0_3();
3340
gen_ldf_asi(insn, 4, rd);
4430
save_state(dc, cpu_cond);
4431
gen_ldf_asi(cpu_addr, insn, 4, rd);
3341
4432
goto skip_move;
3342
4433
case 0x33: /* V9 lddfa */
3343
gen_op_check_align_T0_3();
3344
gen_ldf_asi(insn, 8, DFPREG(rd));
4434
save_state(dc, cpu_cond);
4435
gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
3345
4436
goto skip_move;
3346
4437
case 0x3d: /* V9 prefetcha, no effect */
3347
4438
goto skip_move;
3348
4439
case 0x32: /* V9 ldqfa */
3349
#if defined(CONFIG_USER_ONLY)
3350
gen_op_check_align_T0_3();
3351
gen_ldf_asi(insn, 16, QFPREG(rd));
4440
CHECK_FPU_FEATURE(dc, FLOAT128);
4441
save_state(dc, cpu_cond);
4442
gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
3352
4443
goto skip_move;
3358
4446
goto illegal_insn;
3360
gen_movl_T1_reg(rd);
3361
#ifdef TARGET_SPARC64
4448
gen_movl_TN_reg(rd, cpu_val);
4449
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
3364
4452
} else if (xop >= 0x20 && xop < 0x24) {
3365
if (gen_trap_ifnofpu(dc))
4453
if (gen_trap_ifnofpu(dc, cpu_cond))
4455
save_state(dc, cpu_cond);
3368
4457
case 0x20: /* load fpreg */
3369
gen_op_check_align_T0_3();
3371
gen_op_store_FT0_fpr(rd);
4458
gen_address_mask(dc, cpu_addr);
4459
tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4460
tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
3373
case 0x21: /* load fsr */
3374
gen_op_check_align_T0_3();
4462
case 0x21: /* ldfsr, V9 ldxfsr */
4463
#ifdef TARGET_SPARC64
4464
gen_address_mask(dc, cpu_addr);
4466
tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4467
gen_helper_ldxfsr(cpu_tmp64);
4471
tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4472
gen_helper_ldfsr(cpu_tmp32);
3378
4476
case 0x22: /* load quad fpreg */
3379
#if defined(CONFIG_USER_ONLY)
3380
gen_op_check_align_T0_7();
3382
gen_op_store_QT0_fpr(QFPREG(rd));
4480
CHECK_FPU_FEATURE(dc, FLOAT128);
4481
r_const = tcg_const_i32(dc->mem_idx);
4482
gen_helper_ldqf(cpu_addr, r_const);
4483
tcg_temp_free_i32(r_const);
4484
gen_op_store_QT0_fpr(QFPREG(rd));
3387
4487
case 0x23: /* load double fpreg */
3388
gen_op_check_align_T0_7();
3390
gen_op_store_DT0_fpr(DFPREG(rd));
4491
r_const = tcg_const_i32(dc->mem_idx);
4492
gen_helper_lddf(cpu_addr, r_const);
4493
tcg_temp_free_i32(r_const);
4494
gen_op_store_DT0_fpr(DFPREG(rd));
3393
4498
goto illegal_insn;
3395
4500
} else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
3396
4501
xop == 0xe || xop == 0x1e) {
3397
gen_movl_reg_T1(rd);
4502
gen_movl_reg_TN(rd, cpu_val);
3400
gen_op_check_align_T0_3();
3407
gen_op_check_align_T0_1();
4504
case 0x4: /* store word */
4505
gen_address_mask(dc, cpu_addr);
4506
tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4508
case 0x5: /* store byte */
4509
gen_address_mask(dc, cpu_addr);
4510
tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4512
case 0x6: /* store halfword */
4513
gen_address_mask(dc, cpu_addr);
4514
tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4516
case 0x7: /* store double word */
3412
4518
goto illegal_insn;
3413
gen_op_check_align_T0_7();
3415
gen_movl_reg_T2(rd + 1);
4522
save_state(dc, cpu_cond);
4523
gen_address_mask(dc, cpu_addr);
4524
r_const = tcg_const_i32(7);
4525
gen_helper_check_align(cpu_addr, r_const); // XXX remove
4526
tcg_temp_free_i32(r_const);
4527
gen_movl_reg_TN(rd + 1, cpu_tmp0);
4528
tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4529
tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
3418
4532
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
3420
#ifndef TARGET_SPARC64
3423
if (!supervisor(dc))
3426
gen_op_check_align_T0_3();
3427
gen_st_asi(insn, 4);
3430
#ifndef TARGET_SPARC64
3433
if (!supervisor(dc))
3436
gen_st_asi(insn, 1);
3439
#ifndef TARGET_SPARC64
3442
if (!supervisor(dc))
3445
gen_op_check_align_T0_1();
3446
gen_st_asi(insn, 2);
4533
case 0x14: /* store word alternate */
4534
#ifndef TARGET_SPARC64
4537
if (!supervisor(dc))
4540
save_state(dc, cpu_cond);
4541
gen_st_asi(cpu_val, cpu_addr, insn, 4);
4543
case 0x15: /* store byte alternate */
4544
#ifndef TARGET_SPARC64
4547
if (!supervisor(dc))
4550
save_state(dc, cpu_cond);
4551
gen_st_asi(cpu_val, cpu_addr, insn, 1);
4553
case 0x16: /* store halfword alternate */
4554
#ifndef TARGET_SPARC64
4557
if (!supervisor(dc))
4560
save_state(dc, cpu_cond);
4561
gen_st_asi(cpu_val, cpu_addr, insn, 2);
4563
case 0x17: /* store double word alternate */
3449
4564
#ifndef TARGET_SPARC64
3451
4566
goto illegal_insn;
3711
4896
while (lj <= j)
3712
4897
gen_opc_instr_start[lj++] = 0;
3718
4901
gen_opc_jump_pc[0] = dc->jump_pc[0];
3719
4902
gen_opc_jump_pc[1] = dc->jump_pc[1];
3721
4904
tb->size = last_pc + 4 - pc_start;
4905
tb->icount = num_insns;
3723
4907
#ifdef DEBUG_DISAS
3724
if (loglevel & CPU_LOG_TB_IN_ASM) {
3725
fprintf(logfile, "--------------\n");
3726
fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
3727
target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
3728
fprintf(logfile, "\n");
3729
if (loglevel & CPU_LOG_TB_OP) {
3730
fprintf(logfile, "OP:\n");
3731
dump_ops(gen_opc_buf, gen_opparam_buf);
3732
fprintf(logfile, "\n");
3739
int gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
3741
return gen_intermediate_code_internal(tb, 0, env);
3744
int gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
3746
return gen_intermediate_code_internal(tb, 1, env);
3749
extern int ram_size;
3751
void cpu_reset(CPUSPARCState *env)
3756
env->regwptr = env->regbase + (env->cwp * 16);
3757
#if defined(CONFIG_USER_ONLY)
3758
env->user_mode_only = 1;
3759
#ifdef TARGET_SPARC64
3760
env->cleanwin = NWINDOWS - 2;
3761
env->cansave = NWINDOWS - 2;
3762
env->pstate = PS_RMO | PS_PEF | PS_IE;
3763
env->asi = 0x82; // Primary no-fault
3769
#ifdef TARGET_SPARC64
3770
env->pstate = PS_PRIV;
3771
env->hpstate = HS_PRIV;
3772
env->pc = 0x1fff0000000ULL;
3775
env->mmuregs[0] &= ~(MMU_E | MMU_NF);
3776
env->mmuregs[0] |= env->mmu_bm;
3778
env->npc = env->pc + 4;
3782
CPUSPARCState *cpu_sparc_init(const char *cpu_model)
3785
const sparc_def_t *def;
3787
def = cpu_sparc_find_by_name(cpu_model);
3791
env = qemu_mallocz(sizeof(CPUSPARCState));
3795
env->cpu_model_str = cpu_model;
3796
env->version = def->iu_version;
3797
env->fsr = def->fpu_version;
3798
#if !defined(TARGET_SPARC64)
3799
env->mmu_bm = def->mmu_bm;
3800
env->mmuregs[0] |= def->mmu_version;
3801
cpu_sparc_set_id(env, 0);
3808
void cpu_sparc_set_id(CPUSPARCState *env, unsigned int cpu)
3810
#if !defined(TARGET_SPARC64)
3811
env->mxccregs[7] = ((cpu + 8) & 0xf) << 24;
3815
static const sparc_def_t sparc_defs[] = {
3816
#ifdef TARGET_SPARC64
3818
.name = "Fujitsu Sparc64",
3819
.iu_version = ((0x04ULL << 48) | (0x02ULL << 32) | (0ULL << 24)
3820
| (MAXTL << 8) | (NWINDOWS - 1)),
3821
.fpu_version = 0x00000000,
3825
.name = "Fujitsu Sparc64 III",
3826
.iu_version = ((0x04ULL << 48) | (0x03ULL << 32) | (0ULL << 24)
3827
| (MAXTL << 8) | (NWINDOWS - 1)),
3828
.fpu_version = 0x00000000,
3832
.name = "Fujitsu Sparc64 IV",
3833
.iu_version = ((0x04ULL << 48) | (0x04ULL << 32) | (0ULL << 24)
3834
| (MAXTL << 8) | (NWINDOWS - 1)),
3835
.fpu_version = 0x00000000,
3839
.name = "Fujitsu Sparc64 V",
3840
.iu_version = ((0x04ULL << 48) | (0x05ULL << 32) | (0x51ULL << 24)
3841
| (MAXTL << 8) | (NWINDOWS - 1)),
3842
.fpu_version = 0x00000000,
3846
.name = "TI UltraSparc I",
3847
.iu_version = ((0x17ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
3848
| (MAXTL << 8) | (NWINDOWS - 1)),
3849
.fpu_version = 0x00000000,
3853
.name = "TI UltraSparc II",
3854
.iu_version = ((0x17ULL << 48) | (0x11ULL << 32) | (0x20ULL << 24)
3855
| (MAXTL << 8) | (NWINDOWS - 1)),
3856
.fpu_version = 0x00000000,
3860
.name = "TI UltraSparc IIi",
3861
.iu_version = ((0x17ULL << 48) | (0x12ULL << 32) | (0x91ULL << 24)
3862
| (MAXTL << 8) | (NWINDOWS - 1)),
3863
.fpu_version = 0x00000000,
3867
.name = "TI UltraSparc IIe",
3868
.iu_version = ((0x17ULL << 48) | (0x13ULL << 32) | (0x14ULL << 24)
3869
| (MAXTL << 8) | (NWINDOWS - 1)),
3870
.fpu_version = 0x00000000,
3874
.name = "Sun UltraSparc III",
3875
.iu_version = ((0x3eULL << 48) | (0x14ULL << 32) | (0x34ULL << 24)
3876
| (MAXTL << 8) | (NWINDOWS - 1)),
3877
.fpu_version = 0x00000000,
3881
.name = "Sun UltraSparc III Cu",
3882
.iu_version = ((0x3eULL << 48) | (0x15ULL << 32) | (0x41ULL << 24)
3883
| (MAXTL << 8) | (NWINDOWS - 1)),
3884
.fpu_version = 0x00000000,
3888
.name = "Sun UltraSparc IIIi",
3889
.iu_version = ((0x3eULL << 48) | (0x16ULL << 32) | (0x34ULL << 24)
3890
| (MAXTL << 8) | (NWINDOWS - 1)),
3891
.fpu_version = 0x00000000,
3895
.name = "Sun UltraSparc IV",
3896
.iu_version = ((0x3eULL << 48) | (0x18ULL << 32) | (0x31ULL << 24)
3897
| (MAXTL << 8) | (NWINDOWS - 1)),
3898
.fpu_version = 0x00000000,
3902
.name = "Sun UltraSparc IV+",
3903
.iu_version = ((0x3eULL << 48) | (0x19ULL << 32) | (0x22ULL << 24)
3904
| (MAXTL << 8) | (NWINDOWS - 1)),
3905
.fpu_version = 0x00000000,
3909
.name = "Sun UltraSparc IIIi+",
3910
.iu_version = ((0x3eULL << 48) | (0x22ULL << 32) | (0ULL << 24)
3911
| (MAXTL << 8) | (NWINDOWS - 1)),
3912
.fpu_version = 0x00000000,
3916
.name = "NEC UltraSparc I",
3917
.iu_version = ((0x22ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
3918
| (MAXTL << 8) | (NWINDOWS - 1)),
3919
.fpu_version = 0x00000000,
3924
.name = "Fujitsu MB86900",
3925
.iu_version = 0x00 << 24, /* Impl 0, ver 0 */
3926
.fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
3927
.mmu_version = 0x00 << 24, /* Impl 0, ver 0 */
3928
.mmu_bm = 0x00004000,
3931
.name = "Fujitsu MB86904",
3932
.iu_version = 0x04 << 24, /* Impl 0, ver 4 */
3933
.fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
3934
.mmu_version = 0x04 << 24, /* Impl 0, ver 4 */
3935
.mmu_bm = 0x00004000,
3938
.name = "Fujitsu MB86907",
3939
.iu_version = 0x05 << 24, /* Impl 0, ver 5 */
3940
.fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
3941
.mmu_version = 0x05 << 24, /* Impl 0, ver 5 */
3942
.mmu_bm = 0x00004000,
3945
.name = "LSI L64811",
3946
.iu_version = 0x10 << 24, /* Impl 1, ver 0 */
3947
.fpu_version = 1 << 17, /* FPU version 1 (LSI L64814) */
3948
.mmu_version = 0x10 << 24,
3949
.mmu_bm = 0x00004000,
3952
.name = "Cypress CY7C601",
3953
.iu_version = 0x11 << 24, /* Impl 1, ver 1 */
3954
.fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
3955
.mmu_version = 0x10 << 24,
3956
.mmu_bm = 0x00004000,
3959
.name = "Cypress CY7C611",
3960
.iu_version = 0x13 << 24, /* Impl 1, ver 3 */
3961
.fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
3962
.mmu_version = 0x10 << 24,
3963
.mmu_bm = 0x00004000,
3966
.name = "TI SuperSparc II",
3967
.iu_version = 0x40000000,
3968
.fpu_version = 0 << 17,
3969
.mmu_version = 0x04000000,
3970
.mmu_bm = 0x00002000,
3973
.name = "TI MicroSparc I",
3974
.iu_version = 0x41000000,
3975
.fpu_version = 4 << 17,
3976
.mmu_version = 0x41000000,
3977
.mmu_bm = 0x00004000,
3980
.name = "TI MicroSparc II",
3981
.iu_version = 0x42000000,
3982
.fpu_version = 4 << 17,
3983
.mmu_version = 0x02000000,
3984
.mmu_bm = 0x00004000,
3987
.name = "TI MicroSparc IIep",
3988
.iu_version = 0x42000000,
3989
.fpu_version = 4 << 17,
3990
.mmu_version = 0x04000000,
3991
.mmu_bm = 0x00004000,
3994
.name = "TI SuperSparc 51",
3995
.iu_version = 0x43000000,
3996
.fpu_version = 0 << 17,
3997
.mmu_version = 0x04000000,
3998
.mmu_bm = 0x00002000,
4001
.name = "TI SuperSparc 61",
4002
.iu_version = 0x44000000,
4003
.fpu_version = 0 << 17,
4004
.mmu_version = 0x04000000,
4005
.mmu_bm = 0x00002000,
4008
.name = "Ross RT625",
4009
.iu_version = 0x1e000000,
4010
.fpu_version = 1 << 17,
4011
.mmu_version = 0x1e000000,
4012
.mmu_bm = 0x00004000,
4015
.name = "Ross RT620",
4016
.iu_version = 0x1f000000,
4017
.fpu_version = 1 << 17,
4018
.mmu_version = 0x1f000000,
4019
.mmu_bm = 0x00004000,
4022
.name = "BIT B5010",
4023
.iu_version = 0x20000000,
4024
.fpu_version = 0 << 17, /* B5010/B5110/B5120/B5210 */
4025
.mmu_version = 0x20000000,
4026
.mmu_bm = 0x00004000,
4029
.name = "Matsushita MN10501",
4030
.iu_version = 0x50000000,
4031
.fpu_version = 0 << 17,
4032
.mmu_version = 0x50000000,
4033
.mmu_bm = 0x00004000,
4036
.name = "Weitek W8601",
4037
.iu_version = 0x90 << 24, /* Impl 9, ver 0 */
4038
.fpu_version = 3 << 17, /* FPU version 3 (Weitek WTL3170/2) */
4039
.mmu_version = 0x10 << 24,
4040
.mmu_bm = 0x00004000,
4044
.iu_version = 0xf2000000,
4045
.fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4046
.mmu_version = 0xf2000000,
4047
.mmu_bm = 0x00004000,
4051
.iu_version = 0xf3000000,
4052
.fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4053
.mmu_version = 0xf3000000,
4054
.mmu_bm = 0x00004000,
4059
static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name)
4063
for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
4064
if (strcasecmp(name, sparc_defs[i].name) == 0) {
4065
return &sparc_defs[i];
4071
void sparc_cpu_list (FILE *f, int (*cpu_fprintf)(FILE *f, const char *fmt, ...))
4075
for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
4076
(*cpu_fprintf)(f, "Sparc %16s IU " TARGET_FMT_lx " FPU %08x MMU %08x\n",
4078
sparc_defs[i].iu_version,
4079
sparc_defs[i].fpu_version,
4080
sparc_defs[i].mmu_version);
4084
#define GET_FLAG(a,b) ((env->psr & a)?b:'-')
4086
void cpu_dump_state(CPUState *env, FILE *f,
4087
int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
4092
cpu_fprintf(f, "pc: " TARGET_FMT_lx " npc: " TARGET_FMT_lx "\n", env->pc, env->npc);
4093
cpu_fprintf(f, "General Registers:\n");
4094
for (i = 0; i < 4; i++)
4095
cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
4096
cpu_fprintf(f, "\n");
4098
cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
4099
cpu_fprintf(f, "\nCurrent Register Window:\n");
4100
for (x = 0; x < 3; x++) {
4101
for (i = 0; i < 4; i++)
4102
cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
4103
(x == 0 ? 'o' : (x == 1 ? 'l' : 'i')), i,
4104
env->regwptr[i + x * 8]);
4105
cpu_fprintf(f, "\n");
4107
cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
4108
(x == 0 ? 'o' : x == 1 ? 'l' : 'i'), i,
4109
env->regwptr[i + x * 8]);
4110
cpu_fprintf(f, "\n");
4112
cpu_fprintf(f, "\nFloating Point Registers:\n");
4113
for (i = 0; i < 32; i++) {
4115
cpu_fprintf(f, "%%f%02d:", i);
4116
cpu_fprintf(f, " %016lf", env->fpr[i]);
4118
cpu_fprintf(f, "\n");
4120
#ifdef TARGET_SPARC64
4121
cpu_fprintf(f, "pstate: 0x%08x ccr: 0x%02x asi: 0x%02x tl: %d fprs: %d\n",
4122
env->pstate, GET_CCR(env), env->asi, env->tl, env->fprs);
4123
cpu_fprintf(f, "cansave: %d canrestore: %d otherwin: %d wstate %d cleanwin %d cwp %d\n",
4124
env->cansave, env->canrestore, env->otherwin, env->wstate,
4125
env->cleanwin, NWINDOWS - 1 - env->cwp);
4127
cpu_fprintf(f, "psr: 0x%08x -> %c%c%c%c %c%c%c wim: 0x%08x\n", GET_PSR(env),
4128
GET_FLAG(PSR_ZERO, 'Z'), GET_FLAG(PSR_OVF, 'V'),
4129
GET_FLAG(PSR_NEG, 'N'), GET_FLAG(PSR_CARRY, 'C'),
4130
env->psrs?'S':'-', env->psrps?'P':'-',
4131
env->psret?'E':'-', env->wim);
4133
cpu_fprintf(f, "fsr: 0x%08x\n", GET_FSR32(env));
4136
#if defined(CONFIG_USER_ONLY)
4137
target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
4143
extern int get_physical_address (CPUState *env, target_phys_addr_t *physical, int *prot,
4144
int *access_index, target_ulong address, int rw,
4147
target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
4149
target_phys_addr_t phys_addr;
4150
int prot, access_index;
4152
if (get_physical_address(env, &phys_addr, &prot, &access_index, addr, 2, 0) != 0)
4153
if (get_physical_address(env, &phys_addr, &prot, &access_index, addr, 0, 0) != 0)
4155
if (cpu_get_physical_page_desc(phys_addr) == IO_MEM_UNASSIGNED)
4161
void helper_flush(target_ulong addr)
4164
tb_invalidate_page_range(addr, addr + 8);
4908
if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
4909
qemu_log("--------------\n");
4910
qemu_log("IN: %s\n", lookup_symbol(pc_start));
4911
log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
4917
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4919
gen_intermediate_code_internal(tb, 0, env);
4922
void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4924
gen_intermediate_code_internal(tb, 1, env);
4927
void gen_intermediate_code_init(CPUSPARCState *env)
4931
static const char * const gregnames[8] = {
4932
NULL, // g0 not used
4941
static const char * const fregnames[64] = {
4942
"f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4943
"f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4944
"f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4945
"f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4946
"f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4947
"f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4948
"f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4949
"f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4952
/* init various static tables */
4956
cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
4957
cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
4958
offsetof(CPUState, regwptr),
4960
#ifdef TARGET_SPARC64
4961
cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
4963
cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
4965
cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
4967
cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
4969
cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
4970
offsetof(CPUState, tick_cmpr),
4972
cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
4973
offsetof(CPUState, stick_cmpr),
4975
cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
4976
offsetof(CPUState, hstick_cmpr),
4978
cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
4980
cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
4982
cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
4984
cpu_ssr = tcg_global_mem_new(TCG_AREG0,
4985
offsetof(CPUState, ssr), "ssr");
4986
cpu_ver = tcg_global_mem_new(TCG_AREG0,
4987
offsetof(CPUState, version), "ver");
4988
cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
4989
offsetof(CPUState, softint),
4992
cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
4995
cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
4997
cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
4999
cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5000
offsetof(CPUState, cc_src2),
5002
cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5004
cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5006
cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5008
cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5010
cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5012
cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5013
#ifndef CONFIG_USER_ONLY
5014
cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5017
for (i = 1; i < 8; i++)
5018
cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5019
offsetof(CPUState, gregs[i]),
5021
for (i = 0; i < TARGET_FPREGS; i++)
5022
cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5023
offsetof(CPUState, fpr[i]),
5026
/* register helpers */
5028
#define GEN_HELPER 2
5033
void gen_pc_load(CPUState *env, TranslationBlock *tb,
5034
unsigned long searched_pc, int pc_pos, void *puc)
5037
env->pc = gen_opc_pc[pc_pos];
5038
npc = gen_opc_npc[pc_pos];
5040
/* dynamic NPC: already stored */
5041
} else if (npc == 2) {
5042
target_ulong t2 = (target_ulong)(unsigned long)puc;
5043
/* jump PC: use T2 and the jump targets of the translation */
5045
env->npc = gen_opc_jump_pc[0];
5047
env->npc = gen_opc_jump_pc[1];