2
* PowerPC emulation for qemu: main translation routines.
4
* Copyright (c) 2003-2007 Jocelyn Mayer
5
* Copyright (C) 2011 Freescale Semiconductor, Inc.
7
* This library is free software; you can redistribute it and/or
8
* modify it under the terms of the GNU Lesser General Public
9
* License as published by the Free Software Foundation; either
10
* version 2 of the License, or (at your option) any later version.
12
* This library is distributed in the hope that it will be useful,
13
* but WITHOUT ANY WARRANTY; without even the implied warranty of
14
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15
* Lesser General Public License for more details.
17
* You should have received a copy of the GNU Lesser General Public
18
* License along with this library; if not, see <http://www.gnu.org/licenses/>.
29
#include "qemu-common.h"
30
#include "host-utils.h"
36
#define CPU_SINGLE_STEP 0x1
37
#define CPU_BRANCH_STEP 0x2
38
#define GDBSTUB_SINGLE_STEP 0x4
40
/* Include definitions for instructions classes and implementations flags */
41
//#define PPC_DEBUG_DISAS
42
//#define DO_PPC_STATISTICS
44
#ifdef PPC_DEBUG_DISAS
45
# define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
47
# define LOG_DISAS(...) do { } while (0)
49
/*****************************************************************************/
50
/* Code translation helpers */
52
/* global register indexes */
53
static TCGv_ptr cpu_env;
54
static char cpu_reg_names[10*3 + 22*4 /* GPR */
55
#if !defined(TARGET_PPC64)
56
+ 10*4 + 22*5 /* SPE GPRh */
58
+ 10*4 + 22*5 /* FPR */
59
+ 2*(10*6 + 22*7) /* AVRh, AVRl */
61
static TCGv cpu_gpr[32];
62
#if !defined(TARGET_PPC64)
63
static TCGv cpu_gprh[32];
65
static TCGv_i64 cpu_fpr[32];
66
static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
67
static TCGv_i32 cpu_crf[8];
72
#if defined(TARGET_PPC64)
76
static TCGv cpu_reserve;
77
static TCGv_i32 cpu_fpscr;
78
static TCGv_i32 cpu_access_type;
80
#include "gen-icount.h"
82
void ppc_translate_init(void)
86
size_t cpu_reg_names_size;
87
static int done_init = 0;
92
cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
95
cpu_reg_names_size = sizeof(cpu_reg_names);
97
for (i = 0; i < 8; i++) {
98
snprintf(p, cpu_reg_names_size, "crf%d", i);
99
cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
100
offsetof(CPUState, crf[i]), p);
102
cpu_reg_names_size -= 5;
105
for (i = 0; i < 32; i++) {
106
snprintf(p, cpu_reg_names_size, "r%d", i);
107
cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
108
offsetof(CPUState, gpr[i]), p);
109
p += (i < 10) ? 3 : 4;
110
cpu_reg_names_size -= (i < 10) ? 3 : 4;
111
#if !defined(TARGET_PPC64)
112
snprintf(p, cpu_reg_names_size, "r%dH", i);
113
cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
114
offsetof(CPUState, gprh[i]), p);
115
p += (i < 10) ? 4 : 5;
116
cpu_reg_names_size -= (i < 10) ? 4 : 5;
119
snprintf(p, cpu_reg_names_size, "fp%d", i);
120
cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
121
offsetof(CPUState, fpr[i]), p);
122
p += (i < 10) ? 4 : 5;
123
cpu_reg_names_size -= (i < 10) ? 4 : 5;
125
snprintf(p, cpu_reg_names_size, "avr%dH", i);
126
#ifdef HOST_WORDS_BIGENDIAN
127
cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
128
offsetof(CPUState, avr[i].u64[0]), p);
130
cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
131
offsetof(CPUState, avr[i].u64[1]), p);
133
p += (i < 10) ? 6 : 7;
134
cpu_reg_names_size -= (i < 10) ? 6 : 7;
136
snprintf(p, cpu_reg_names_size, "avr%dL", i);
137
#ifdef HOST_WORDS_BIGENDIAN
138
cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
139
offsetof(CPUState, avr[i].u64[1]), p);
141
cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
142
offsetof(CPUState, avr[i].u64[0]), p);
144
p += (i < 10) ? 6 : 7;
145
cpu_reg_names_size -= (i < 10) ? 6 : 7;
148
cpu_nip = tcg_global_mem_new(TCG_AREG0,
149
offsetof(CPUState, nip), "nip");
151
cpu_msr = tcg_global_mem_new(TCG_AREG0,
152
offsetof(CPUState, msr), "msr");
154
cpu_ctr = tcg_global_mem_new(TCG_AREG0,
155
offsetof(CPUState, ctr), "ctr");
157
cpu_lr = tcg_global_mem_new(TCG_AREG0,
158
offsetof(CPUState, lr), "lr");
160
#if defined(TARGET_PPC64)
161
cpu_cfar = tcg_global_mem_new(TCG_AREG0,
162
offsetof(CPUState, cfar), "cfar");
165
cpu_xer = tcg_global_mem_new(TCG_AREG0,
166
offsetof(CPUState, xer), "xer");
168
cpu_reserve = tcg_global_mem_new(TCG_AREG0,
169
offsetof(CPUState, reserve_addr),
172
cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
173
offsetof(CPUState, fpscr), "fpscr");
175
cpu_access_type = tcg_global_mem_new_i32(TCG_AREG0,
176
offsetof(CPUState, access_type), "access_type");
178
/* register helpers */
185
/* internal defines */
186
typedef struct DisasContext {
187
struct TranslationBlock *tb;
191
/* Routine used to access memory */
194
/* Translation flags */
196
#if defined(TARGET_PPC64)
203
ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
204
int singlestep_enabled;
207
struct opc_handler_t {
208
/* invalid bits for instruction 1 (Rc(opcode) == 0) */
210
/* invalid bits for instruction 2 (Rc(opcode) == 1) */
212
/* instruction type */
214
/* extended instruction type */
217
void (*handler)(DisasContext *ctx);
218
#if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
221
#if defined(DO_PPC_STATISTICS)
226
/* Reset the inexact/FP status helper state before an FP computation. */
static inline void gen_reset_fpstatus(void)
{
    gen_helper_reset_fpstatus();
}
231
static inline void gen_compute_fprf(TCGv_i64 arg, int set_fprf, int set_rc)
233
TCGv_i32 t0 = tcg_temp_new_i32();
236
/* This case might be optimized later */
237
tcg_gen_movi_i32(t0, 1);
238
gen_helper_compute_fprf(t0, arg, t0);
239
if (unlikely(set_rc)) {
240
tcg_gen_mov_i32(cpu_crf[1], t0);
242
gen_helper_float_check_status();
243
} else if (unlikely(set_rc)) {
244
/* We always need to compute fpcc */
245
tcg_gen_movi_i32(t0, 0);
246
gen_helper_compute_fprf(t0, arg, t0);
247
tcg_gen_mov_i32(cpu_crf[1], t0);
250
tcg_temp_free_i32(t0);
253
/* Record the current memory access type in CPU state, but only emit the
 * store when it differs from the value cached in the translation context. */
static inline void gen_set_access_type(DisasContext *ctx, int access_type)
{
    if (ctx->access_type != access_type) {
        tcg_gen_movi_i32(cpu_access_type, access_type);
        ctx->access_type = access_type;
    }
}
261
static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
263
#if defined(TARGET_PPC64)
265
tcg_gen_movi_tl(cpu_nip, nip);
268
tcg_gen_movi_tl(cpu_nip, (uint32_t)nip);
271
static inline void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
274
if (ctx->exception == POWERPC_EXCP_NONE) {
275
gen_update_nip(ctx, ctx->nip);
277
t0 = tcg_const_i32(excp);
278
t1 = tcg_const_i32(error);
279
gen_helper_raise_exception_err(t0, t1);
280
tcg_temp_free_i32(t0);
281
tcg_temp_free_i32(t1);
282
ctx->exception = (excp);
285
static inline void gen_exception(DisasContext *ctx, uint32_t excp)
288
if (ctx->exception == POWERPC_EXCP_NONE) {
289
gen_update_nip(ctx, ctx->nip);
291
t0 = tcg_const_i32(excp);
292
gen_helper_raise_exception(t0);
293
tcg_temp_free_i32(t0);
294
ctx->exception = (excp);
297
/* Raise EXCP_DEBUG for the gdbstub.  nip is synchronized unless a branch
 * or sync exception already changed the flow (those handle nip themselves). */
static inline void gen_debug_exception(DisasContext *ctx)
{
    TCGv_i32 t0;

    if ((ctx->exception != POWERPC_EXCP_BRANCH) &&
        (ctx->exception != POWERPC_EXCP_SYNC)) {
        gen_update_nip(ctx, ctx->nip);
    }
    t0 = tcg_const_i32(EXCP_DEBUG);
    gen_helper_raise_exception(t0);
    tcg_temp_free_i32(t0);
}
310
static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
312
gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_INVAL | error);
315
/* Stop translation */
316
static inline void gen_stop_exception(DisasContext *ctx)
318
gen_update_nip(ctx, ctx->nip);
319
ctx->exception = POWERPC_EXCP_STOP;
322
/* No need to update nip here, as execution flow will change */
323
static inline void gen_sync_exception(DisasContext *ctx)
325
ctx->exception = POWERPC_EXCP_SYNC;
328
#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
329
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
331
#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \
332
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
334
#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
335
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
337
#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \
338
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
340
typedef struct opcode_t {
341
unsigned char opc1, opc2, opc3;
342
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
343
unsigned char pad[5];
345
unsigned char pad[1];
347
opc_handler_t handler;
351
/*****************************************************************************/
352
/*** Instruction decoding ***/
353
/* Generate an inline accessor returning the unsigned bit-field of <nb>
 * bits starting at bit <shift> of a 32-bit opcode word. */
#define EXTRACT_HELPER(name, shift, nb)                                       \
static inline uint32_t name(uint32_t opcode)                                  \
{                                                                             \
    return (opcode >> (shift)) & ((1 << (nb)) - 1);                           \
}
359
/* Same as EXTRACT_HELPER, but the masked field is passed through int16_t
 * so the result is sign-extended (used for 16-bit signed immediates). */
#define EXTRACT_SHELPER(name, shift, nb)                                      \
static inline int32_t name(uint32_t opcode)                                   \
{                                                                             \
    return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1));                \
}
366
EXTRACT_HELPER(opc1, 26, 6);
368
EXTRACT_HELPER(opc2, 1, 5);
370
EXTRACT_HELPER(opc3, 6, 5);
371
/* Update Cr0 flags */
372
EXTRACT_HELPER(Rc, 0, 1);
374
EXTRACT_HELPER(rD, 21, 5);
376
EXTRACT_HELPER(rS, 21, 5);
378
EXTRACT_HELPER(rA, 16, 5);
380
EXTRACT_HELPER(rB, 11, 5);
382
EXTRACT_HELPER(rC, 6, 5);
384
EXTRACT_HELPER(crfD, 23, 3);
385
EXTRACT_HELPER(crfS, 18, 3);
386
EXTRACT_HELPER(crbD, 21, 5);
387
EXTRACT_HELPER(crbA, 16, 5);
388
EXTRACT_HELPER(crbB, 11, 5);
390
EXTRACT_HELPER(_SPR, 11, 10);
/* The SPR number is encoded with its two 5-bit halves swapped in the
 * instruction word; swap them back to recover the architected SPR index. */
static inline uint32_t SPR(uint32_t opcode)
{
    uint32_t sprn = _SPR(opcode);

    return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
}
397
/*** Get constants ***/
398
EXTRACT_HELPER(IMM, 12, 8);
399
/* 16 bits signed immediate value */
400
EXTRACT_SHELPER(SIMM, 0, 16);
401
/* 16 bits unsigned immediate value */
402
EXTRACT_HELPER(UIMM, 0, 16);
403
/* 5 bits signed immediate value */
404
EXTRACT_HELPER(SIMM5, 16, 5);
405
/* 5 bits signed immediate value */
406
EXTRACT_HELPER(UIMM5, 16, 5);
408
EXTRACT_HELPER(NB, 11, 5);
410
EXTRACT_HELPER(SH, 11, 5);
411
/* Vector shift count */
412
EXTRACT_HELPER(VSH, 6, 4);
414
EXTRACT_HELPER(MB, 6, 5);
416
EXTRACT_HELPER(ME, 1, 5);
418
EXTRACT_HELPER(TO, 21, 5);
420
EXTRACT_HELPER(CRM, 12, 8);
421
EXTRACT_HELPER(FM, 17, 8);
422
EXTRACT_HELPER(SR, 16, 4);
423
EXTRACT_HELPER(FPIMM, 12, 4);
425
/*** Jump target decoding ***/
427
EXTRACT_SHELPER(d, 0, 16);
428
/* Immediate address */
429
static inline target_ulong LI(uint32_t opcode)
431
return (opcode >> 0) & 0x03FFFFFC;
434
/* B-form branch displacement: low 16 bits of the opcode with the two
 * least-significant bits (AA/LK) cleared. */
static inline uint32_t BD(uint32_t opcode)
{
    return (opcode >> 0) & 0xFFFC;
}
439
EXTRACT_HELPER(BO, 21, 5);
440
EXTRACT_HELPER(BI, 16, 5);
441
/* Absolute/relative address */
442
EXTRACT_HELPER(AA, 1, 1);
444
EXTRACT_HELPER(LK, 0, 1);
446
/* Create a mask between <start> and <end> bits */
447
static inline target_ulong MASK(uint32_t start, uint32_t end)
451
#if defined(TARGET_PPC64)
452
if (likely(start == 0)) {
453
ret = UINT64_MAX << (63 - end);
454
} else if (likely(end == 63)) {
455
ret = UINT64_MAX >> start;
458
if (likely(start == 0)) {
459
ret = UINT32_MAX << (31 - end);
460
} else if (likely(end == 31)) {
461
ret = UINT32_MAX >> start;
465
ret = (((target_ulong)(-1ULL)) >> (start)) ^
466
(((target_ulong)(-1ULL) >> (end)) >> 1);
467
if (unlikely(start > end))
474
/*****************************************************************************/
475
/* PowerPC instructions table */
477
#if defined(DO_PPC_STATISTICS)
478
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
488
.handler = &gen_##name, \
489
.oname = stringify(name), \
491
.oname = stringify(name), \
493
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
504
.handler = &gen_##name, \
505
.oname = stringify(name), \
507
.oname = stringify(name), \
509
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
519
.handler = &gen_##name, \
525
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
535
.handler = &gen_##name, \
537
.oname = stringify(name), \
539
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
550
.handler = &gen_##name, \
552
.oname = stringify(name), \
554
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
564
.handler = &gen_##name, \
570
/* SPR load/store helpers */
571
static inline void gen_load_spr(TCGv t, int reg)
573
tcg_gen_ld_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
576
/* Store TCG value <t> into SPR <reg> in CPU state. */
static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
}
581
/* Invalid instruction */
582
static void gen_invalid(DisasContext *ctx)
584
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
587
static opc_handler_t invalid_handler = {
588
.inval1 = 0xFFFFFFFF,
589
.inval2 = 0xFFFFFFFF,
592
.handler = gen_invalid,
595
/*** Integer comparison ***/
597
static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
601
tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_xer);
602
tcg_gen_shri_i32(cpu_crf[crf], cpu_crf[crf], XER_SO);
603
tcg_gen_andi_i32(cpu_crf[crf], cpu_crf[crf], 1);
605
l1 = gen_new_label();
606
l2 = gen_new_label();
607
l3 = gen_new_label();
609
tcg_gen_brcond_tl(TCG_COND_LT, arg0, arg1, l1);
610
tcg_gen_brcond_tl(TCG_COND_GT, arg0, arg1, l2);
612
tcg_gen_brcond_tl(TCG_COND_LTU, arg0, arg1, l1);
613
tcg_gen_brcond_tl(TCG_COND_GTU, arg0, arg1, l2);
615
tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_EQ);
618
tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_LT);
621
tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_GT);
625
static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
627
TCGv t0 = tcg_const_local_tl(arg1);
628
gen_op_cmp(arg0, t0, s, crf);
632
#if defined(TARGET_PPC64)
633
static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
636
t0 = tcg_temp_local_new();
637
t1 = tcg_temp_local_new();
639
tcg_gen_ext32s_tl(t0, arg0);
640
tcg_gen_ext32s_tl(t1, arg1);
642
tcg_gen_ext32u_tl(t0, arg0);
643
tcg_gen_ext32u_tl(t1, arg1);
645
gen_op_cmp(t0, t1, s, crf);
650
static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
652
TCGv t0 = tcg_const_local_tl(arg1);
653
gen_op_cmp32(arg0, t0, s, crf);
658
static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
660
#if defined(TARGET_PPC64)
662
gen_op_cmpi32(reg, 0, 1, 0);
665
gen_op_cmpi(reg, 0, 1, 0);
669
static void gen_cmp(DisasContext *ctx)
671
#if defined(TARGET_PPC64)
672
if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
673
gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
674
1, crfD(ctx->opcode));
677
gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
678
1, crfD(ctx->opcode));
682
static void gen_cmpi(DisasContext *ctx)
684
#if defined(TARGET_PPC64)
685
if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
686
gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
687
1, crfD(ctx->opcode));
690
gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
691
1, crfD(ctx->opcode));
695
static void gen_cmpl(DisasContext *ctx)
697
#if defined(TARGET_PPC64)
698
if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
699
gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
700
0, crfD(ctx->opcode));
703
gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
704
0, crfD(ctx->opcode));
708
static void gen_cmpli(DisasContext *ctx)
710
#if defined(TARGET_PPC64)
711
if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
712
gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
713
0, crfD(ctx->opcode));
716
gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
717
0, crfD(ctx->opcode));
720
/* isel (PowerPC 2.03 specification) */
721
static void gen_isel(DisasContext *ctx)
724
uint32_t bi = rC(ctx->opcode);
728
l1 = gen_new_label();
729
l2 = gen_new_label();
731
mask = 1 << (3 - (bi & 0x03));
732
t0 = tcg_temp_new_i32();
733
tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
734
tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
735
if (rA(ctx->opcode) == 0)
736
tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
738
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
741
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
743
tcg_temp_free_i32(t0);
746
/*** Integer arithmetic ***/
748
static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
749
TCGv arg1, TCGv arg2, int sub)
754
l1 = gen_new_label();
755
/* Start with XER OV disabled, the most likely case */
756
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
757
t0 = tcg_temp_local_new();
758
tcg_gen_xor_tl(t0, arg0, arg1);
759
#if defined(TARGET_PPC64)
761
tcg_gen_ext32s_tl(t0, t0);
764
tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
766
tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
767
tcg_gen_xor_tl(t0, arg1, arg2);
768
#if defined(TARGET_PPC64)
770
tcg_gen_ext32s_tl(t0, t0);
773
tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
775
tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
776
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
781
static inline void gen_op_arith_compute_ca(DisasContext *ctx, TCGv arg1,
784
int l1 = gen_new_label();
786
#if defined(TARGET_PPC64)
787
if (!(ctx->sf_mode)) {
792
tcg_gen_ext32u_tl(t0, arg1);
793
tcg_gen_ext32u_tl(t1, arg2);
795
tcg_gen_brcond_tl(TCG_COND_GTU, t0, t1, l1);
797
tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
799
tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
807
tcg_gen_brcond_tl(TCG_COND_GTU, arg1, arg2, l1);
809
tcg_gen_brcond_tl(TCG_COND_GEU, arg1, arg2, l1);
811
tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
816
/* Common add function */
817
static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
818
TCGv arg2, int add_ca, int compute_ca,
823
if ((!compute_ca && !compute_ov) ||
824
(!TCGV_EQUAL(ret,arg1) && !TCGV_EQUAL(ret, arg2))) {
827
t0 = tcg_temp_local_new();
831
t1 = tcg_temp_local_new();
832
tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
833
tcg_gen_shri_tl(t1, t1, XER_CA);
838
if (compute_ca && compute_ov) {
839
/* Start with XER CA and OV disabled, the most likely case */
840
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
841
} else if (compute_ca) {
842
/* Start with XER CA disabled, the most likely case */
843
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
844
} else if (compute_ov) {
845
/* Start with XER OV disabled, the most likely case */
846
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
849
tcg_gen_add_tl(t0, arg1, arg2);
852
gen_op_arith_compute_ca(ctx, t0, arg1, 0);
855
tcg_gen_add_tl(t0, t0, t1);
856
gen_op_arith_compute_ca(ctx, t0, t1, 0);
860
gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
863
if (unlikely(Rc(ctx->opcode) != 0))
864
gen_set_Rc0(ctx, t0);
866
if (!TCGV_EQUAL(t0, ret)) {
867
tcg_gen_mov_tl(ret, t0);
871
/* Add functions with two operands */
872
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
873
static void glue(gen_, name)(DisasContext *ctx) \
875
gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
876
cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
877
add_ca, compute_ca, compute_ov); \
879
/* Add functions with one operand and one immediate */
880
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
881
add_ca, compute_ca, compute_ov) \
882
static void glue(gen_, name)(DisasContext *ctx) \
884
TCGv t0 = tcg_const_local_tl(const_val); \
885
gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
886
cpu_gpr[rA(ctx->opcode)], t0, \
887
add_ca, compute_ca, compute_ov); \
891
/* add add. addo addo. */
892
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
893
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
894
/* addc addc. addco addco. */
895
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
896
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
897
/* adde adde. addeo addeo. */
898
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
899
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
900
/* addme addme. addmeo addmeo. */
901
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
902
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
903
/* addze addze. addzeo addzeo.*/
904
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
905
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
907
/* addi: rD = rA + simm; when rA is 0 this is the li (load immediate) form. */
static void gen_addi(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);

    if (rA(ctx->opcode) == 0) {
        /* li case */
        tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
    } else {
        tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm);
    }
}
919
static inline void gen_op_addic(DisasContext *ctx, TCGv ret, TCGv arg1,
922
target_long simm = SIMM(ctx->opcode);
924
/* Start with XER CA and OV disabled, the most likely case */
925
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
927
if (likely(simm != 0)) {
928
TCGv t0 = tcg_temp_local_new();
929
tcg_gen_addi_tl(t0, arg1, simm);
930
gen_op_arith_compute_ca(ctx, t0, arg1, 0);
931
tcg_gen_mov_tl(ret, t0);
934
tcg_gen_mov_tl(ret, arg1);
937
gen_set_Rc0(ctx, ret);
941
/* addic: add immediate and set carry, without updating CR0. */
static void gen_addic(DisasContext *ctx)
{
    gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
}
946
/* addic.: add immediate and set carry, also updating CR0. */
static void gen_addic_(DisasContext *ctx)
{
    gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
}
952
/* addis: rD = rA + (simm << 16); when rA is 0 this is the lis form. */
static void gen_addis(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);

    if (rA(ctx->opcode) == 0) {
        /* lis case */
        tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
    } else {
        tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                        simm << 16);
    }
}
964
static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
965
TCGv arg2, int sign, int compute_ov)
967
int l1 = gen_new_label();
968
int l2 = gen_new_label();
969
TCGv_i32 t0 = tcg_temp_local_new_i32();
970
TCGv_i32 t1 = tcg_temp_local_new_i32();
972
tcg_gen_trunc_tl_i32(t0, arg1);
973
tcg_gen_trunc_tl_i32(t1, arg2);
974
tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1);
976
int l3 = gen_new_label();
977
tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3);
978
tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1);
980
tcg_gen_div_i32(t0, t0, t1);
982
tcg_gen_divu_i32(t0, t0, t1);
985
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
990
tcg_gen_sari_i32(t0, t0, 31);
992
tcg_gen_movi_i32(t0, 0);
995
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
998
tcg_gen_extu_i32_tl(ret, t0);
999
tcg_temp_free_i32(t0);
1000
tcg_temp_free_i32(t1);
1001
if (unlikely(Rc(ctx->opcode) != 0))
1002
gen_set_Rc0(ctx, ret);
1005
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
1006
static void glue(gen_, name)(DisasContext *ctx) \
1008
gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
1009
cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1010
sign, compute_ov); \
1012
/* divwu divwu. divwuo divwuo. */
1013
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
1014
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
1015
/* divw divw. divwo divwo. */
1016
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
1017
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
1018
#if defined(TARGET_PPC64)
1019
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
1020
TCGv arg2, int sign, int compute_ov)
1022
int l1 = gen_new_label();
1023
int l2 = gen_new_label();
1025
tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1);
1027
int l3 = gen_new_label();
1028
tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3);
1029
tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1);
1031
tcg_gen_div_i64(ret, arg1, arg2);
1033
tcg_gen_divu_i64(ret, arg1, arg2);
1036
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1041
tcg_gen_sari_i64(ret, arg1, 63);
1043
tcg_gen_movi_i64(ret, 0);
1046
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1049
if (unlikely(Rc(ctx->opcode) != 0))
1050
gen_set_Rc0(ctx, ret);
1052
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
1053
static void glue(gen_, name)(DisasContext *ctx) \
1055
gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
1056
cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1057
sign, compute_ov); \
1059
/* divwu divwu. divwuo divwuo. */
1060
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1061
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1062
/* divw divw. divwo divwo. */
1063
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1064
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1068
static void gen_mulhw(DisasContext *ctx)
1072
t0 = tcg_temp_new_i64();
1073
t1 = tcg_temp_new_i64();
1074
#if defined(TARGET_PPC64)
1075
tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
1076
tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
1077
tcg_gen_mul_i64(t0, t0, t1);
1078
tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
1080
tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1081
tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1082
tcg_gen_mul_i64(t0, t0, t1);
1083
tcg_gen_shri_i64(t0, t0, 32);
1084
tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1086
tcg_temp_free_i64(t0);
1087
tcg_temp_free_i64(t1);
1088
if (unlikely(Rc(ctx->opcode) != 0))
1089
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1092
/* mulhwu mulhwu. */
1093
static void gen_mulhwu(DisasContext *ctx)
1097
t0 = tcg_temp_new_i64();
1098
t1 = tcg_temp_new_i64();
1099
#if defined(TARGET_PPC64)
1100
tcg_gen_ext32u_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1101
tcg_gen_ext32u_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1102
tcg_gen_mul_i64(t0, t0, t1);
1103
tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
1105
tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1106
tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1107
tcg_gen_mul_i64(t0, t0, t1);
1108
tcg_gen_shri_i64(t0, t0, 32);
1109
tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1111
tcg_temp_free_i64(t0);
1112
tcg_temp_free_i64(t1);
1113
if (unlikely(Rc(ctx->opcode) != 0))
1114
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1118
/* mullw mullw.: rD = low 32 bits of rA * rB, result sign-extended. */
static void gen_mullw(DisasContext *ctx)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
1127
/* mullwo mullwo. */
1128
static void gen_mullwo(DisasContext *ctx)
1133
t0 = tcg_temp_new_i64();
1134
t1 = tcg_temp_new_i64();
1135
l1 = gen_new_label();
1136
/* Start with XER OV disabled, the most likely case */
1137
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1138
#if defined(TARGET_PPC64)
1139
tcg_gen_ext32s_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1140
tcg_gen_ext32s_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1142
tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1143
tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1145
tcg_gen_mul_i64(t0, t0, t1);
1146
#if defined(TARGET_PPC64)
1147
tcg_gen_ext32s_i64(cpu_gpr[rD(ctx->opcode)], t0);
1148
tcg_gen_brcond_i64(TCG_COND_EQ, t0, cpu_gpr[rD(ctx->opcode)], l1);
1150
tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1151
tcg_gen_ext32s_i64(t1, t0);
1152
tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
1154
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1156
tcg_temp_free_i64(t0);
1157
tcg_temp_free_i64(t1);
1158
if (unlikely(Rc(ctx->opcode) != 0))
1159
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1163
static void gen_mulli(DisasContext *ctx)
1165
tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1168
#if defined(TARGET_PPC64)
1169
#define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
1170
static void glue(gen_, name)(DisasContext *ctx) \
1172
gen_helper_##name (cpu_gpr[rD(ctx->opcode)], \
1173
cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
1174
if (unlikely(Rc(ctx->opcode) != 0)) \
1175
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
1178
GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00);
1179
/* mulhdu mulhdu. */
1180
GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02);
1183
/* mulld mulld.: rD = low 64 bits of rA * rB. */
static void gen_mulld(DisasContext *ctx)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
1190
/* mulldo mulldo. */
1191
GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17);
1194
/* neg neg. nego nego. */
1195
static inline void gen_op_arith_neg(DisasContext *ctx, TCGv ret, TCGv arg1,
1198
int l1 = gen_new_label();
1199
int l2 = gen_new_label();
1200
TCGv t0 = tcg_temp_local_new();
1201
#if defined(TARGET_PPC64)
1203
tcg_gen_mov_tl(t0, arg1);
1204
tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT64_MIN, l1);
1208
tcg_gen_ext32s_tl(t0, arg1);
1209
tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT32_MIN, l1);
1211
tcg_gen_neg_tl(ret, arg1);
1213
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1217
tcg_gen_mov_tl(ret, t0);
1219
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1223
if (unlikely(Rc(ctx->opcode) != 0))
1224
gen_set_Rc0(ctx, ret);
1227
/* neg neg.: negate without overflow tracking. */
static void gen_neg(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
}
1232
/* nego nego.: negate with XER overflow tracking. */
static void gen_nego(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
}
1237
/* Common subf function */
1238
static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1239
TCGv arg2, int add_ca, int compute_ca,
1244
if ((!compute_ca && !compute_ov) ||
1245
(!TCGV_EQUAL(ret, arg1) && !TCGV_EQUAL(ret, arg2))) {
1248
t0 = tcg_temp_local_new();
1252
t1 = tcg_temp_local_new();
1253
tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
1254
tcg_gen_shri_tl(t1, t1, XER_CA);
1259
if (compute_ca && compute_ov) {
1260
/* Start with XER CA and OV disabled, the most likely case */
1261
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
1262
} else if (compute_ca) {
1263
/* Start with XER CA disabled, the most likely case */
1264
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1265
} else if (compute_ov) {
1266
/* Start with XER OV disabled, the most likely case */
1267
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1271
tcg_gen_not_tl(t0, arg1);
1272
tcg_gen_add_tl(t0, t0, arg2);
1273
gen_op_arith_compute_ca(ctx, t0, arg2, 0);
1274
tcg_gen_add_tl(t0, t0, t1);
1275
gen_op_arith_compute_ca(ctx, t0, t1, 0);
1278
tcg_gen_sub_tl(t0, arg2, arg1);
1280
gen_op_arith_compute_ca(ctx, t0, arg2, 1);
1284
gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1287
if (unlikely(Rc(ctx->opcode) != 0))
1288
gen_set_Rc0(ctx, t0);
1290
if (!TCGV_EQUAL(t0, ret)) {
1291
tcg_gen_mov_tl(ret, t0);
1295
/* Sub functions with Two operands functions */
1296
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
1297
static void glue(gen_, name)(DisasContext *ctx) \
1299
gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1300
cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1301
add_ca, compute_ca, compute_ov); \
1303
/* Sub functions with one operand and one immediate */
1304
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
1305
add_ca, compute_ca, compute_ov) \
1306
static void glue(gen_, name)(DisasContext *ctx) \
1308
TCGv t0 = tcg_const_local_tl(const_val); \
1309
gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1310
cpu_gpr[rA(ctx->opcode)], t0, \
1311
add_ca, compute_ca, compute_ov); \
1312
tcg_temp_free(t0); \
1314
/* subf subf. subfo subfo. */
1315
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
1316
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
1317
/* subfc subfc. subfco subfco. */
1318
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
1319
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
1320
/* subfe subfe. subfeo subfo. */
1321
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
1322
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
1323
/* subfme subfme. subfmeo subfmeo. */
1324
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
1325
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
1326
/* subfze subfze. subfzeo subfzeo.*/
1327
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
1328
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
1331
static void gen_subfic(DisasContext *ctx)
1333
/* Start with XER CA and OV disabled, the most likely case */
1334
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1335
TCGv t0 = tcg_temp_local_new();
1336
TCGv t1 = tcg_const_local_tl(SIMM(ctx->opcode));
1337
tcg_gen_sub_tl(t0, t1, cpu_gpr[rA(ctx->opcode)]);
1338
gen_op_arith_compute_ca(ctx, t0, t1, 1);
1340
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
1344
/*** Integer logical ***/
/* Two-source logical op: rA = rS <op> rB, optional Rc0 update. */
#define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

/* One-source logical op: rA = <op>(rS), optional Rc0 update. */
#define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}
1363
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
1365
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
1368
static void gen_andi_(DisasContext *ctx)
1370
tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
1371
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1375
static void gen_andis_(DisasContext *ctx)
1377
tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
1378
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1382
static void gen_cntlzw(DisasContext *ctx)
1384
gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1385
if (unlikely(Rc(ctx->opcode) != 0))
1386
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1389
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
1390
/* extsb & extsb. */
1391
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
1392
/* extsh & extsh. */
1393
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
1395
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
1397
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
1400
/* or & or. — also decodes the "or rx,rx,rx" priority-hint nops on PPC64,
 * which set the process-priority field of SPR_PPR. */
static void gen_or(DisasContext *ctx)
{
    int rs, ra, rb;

    rs = rS(ctx->opcode);
    ra = rA(ctx->opcode);
    rb = rB(ctx->opcode);
    /* Optimisation for mr. ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb)
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        else
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        if (unlikely(Rc(ctx->opcode) != 0))
            gen_set_Rc0(ctx, cpu_gpr[ra]);
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else {
        /* "or rx,rx,rx" with rx selecting a priority hint */
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (ctx->mem_idx > 0) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (ctx->mem_idx > 0) {
                /* Set process priority to medium-hight */
                prio = 5;
            }
            break;
        case 3:
            if (ctx->mem_idx > 0) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->mem_idx > 1) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            /* nop */
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#endif
    }
}
1476
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
1479
static void gen_xor(DisasContext *ctx)
1481
/* Optimisation for "set to zero" case */
1482
if (rS(ctx->opcode) != rB(ctx->opcode))
1483
tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1485
tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1486
if (unlikely(Rc(ctx->opcode) != 0))
1487
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1491
static void gen_ori(DisasContext *ctx)
1493
target_ulong uimm = UIMM(ctx->opcode);
1495
if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1497
/* XXX: should handle special NOPs for POWER series */
1500
tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1504
static void gen_oris(DisasContext *ctx)
1506
target_ulong uimm = UIMM(ctx->opcode);
1508
if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1512
tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1516
static void gen_xori(DisasContext *ctx)
1518
target_ulong uimm = UIMM(ctx->opcode);
1520
if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1524
tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1528
static void gen_xoris(DisasContext *ctx)
1530
target_ulong uimm = UIMM(ctx->opcode);
1532
if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1536
tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1539
/* popcntb : PowerPC 2.03 specification */
1540
static void gen_popcntb(DisasContext *ctx)
1542
gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1545
static void gen_popcntw(DisasContext *ctx)
1547
gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1550
#if defined(TARGET_PPC64)
1551
/* popcntd: PowerPC 2.06 specification */
1552
static void gen_popcntd(DisasContext *ctx)
1554
gen_helper_popcntd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1558
#if defined(TARGET_PPC64)
1559
/* extsw & extsw. */
1560
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
1563
static void gen_cntlzd(DisasContext *ctx)
1565
gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1566
if (unlikely(Rc(ctx->opcode) != 0))
1567
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1571
/*** Integer rotate ***/
1573
/* rlwimi & rlwimi. */
1574
static void gen_rlwimi(DisasContext *ctx)
1576
uint32_t mb, me, sh;
1578
mb = MB(ctx->opcode);
1579
me = ME(ctx->opcode);
1580
sh = SH(ctx->opcode);
1581
if (likely(sh == 0 && mb == 0 && me == 31)) {
1582
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1586
TCGv t0 = tcg_temp_new();
1587
#if defined(TARGET_PPC64)
1588
TCGv_i32 t2 = tcg_temp_new_i32();
1589
tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
1590
tcg_gen_rotli_i32(t2, t2, sh);
1591
tcg_gen_extu_i32_i64(t0, t2);
1592
tcg_temp_free_i32(t2);
1594
tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1596
#if defined(TARGET_PPC64)
1600
mask = MASK(mb, me);
1601
t1 = tcg_temp_new();
1602
tcg_gen_andi_tl(t0, t0, mask);
1603
tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1604
tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1608
if (unlikely(Rc(ctx->opcode) != 0))
1609
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1612
/* rlwinm & rlwinm. */
1613
static void gen_rlwinm(DisasContext *ctx)
1615
uint32_t mb, me, sh;
1617
sh = SH(ctx->opcode);
1618
mb = MB(ctx->opcode);
1619
me = ME(ctx->opcode);
1621
if (likely(mb == 0 && me == (31 - sh))) {
1622
if (likely(sh == 0)) {
1623
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1625
TCGv t0 = tcg_temp_new();
1626
tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1627
tcg_gen_shli_tl(t0, t0, sh);
1628
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1631
} else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
1632
TCGv t0 = tcg_temp_new();
1633
tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1634
tcg_gen_shri_tl(t0, t0, mb);
1635
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1638
TCGv t0 = tcg_temp_new();
1639
#if defined(TARGET_PPC64)
1640
TCGv_i32 t1 = tcg_temp_new_i32();
1641
tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1642
tcg_gen_rotli_i32(t1, t1, sh);
1643
tcg_gen_extu_i32_i64(t0, t1);
1644
tcg_temp_free_i32(t1);
1646
tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1648
#if defined(TARGET_PPC64)
1652
tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1655
if (unlikely(Rc(ctx->opcode) != 0))
1656
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1659
/* rlwnm & rlwnm. */
1660
static void gen_rlwnm(DisasContext *ctx)
1664
#if defined(TARGET_PPC64)
1668
mb = MB(ctx->opcode);
1669
me = ME(ctx->opcode);
1670
t0 = tcg_temp_new();
1671
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
1672
#if defined(TARGET_PPC64)
1673
t1 = tcg_temp_new_i32();
1674
t2 = tcg_temp_new_i32();
1675
tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1676
tcg_gen_trunc_i64_i32(t2, t0);
1677
tcg_gen_rotl_i32(t1, t1, t2);
1678
tcg_gen_extu_i32_i64(t0, t1);
1679
tcg_temp_free_i32(t1);
1680
tcg_temp_free_i32(t2);
1682
tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
1684
if (unlikely(mb != 0 || me != 31)) {
1685
#if defined(TARGET_PPC64)
1689
tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1691
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1694
if (unlikely(Rc(ctx->opcode) != 0))
1695
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1698
#if defined(TARGET_PPC64)
/* Expand the two encodings of an rld* instruction with a 1-bit field. */
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0);                                                       \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1);                                                       \
}

/* Expand the four encodings of an rld* instruction with two 1-bit fields. */
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 1);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##2)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##3)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 1);                                                    \
}
1730
/* Common body for rldicl/rldicr/rldic: rotate left doubleword by immediate
 * sh, then AND with MASK(mb, me).  Shift-only encodings are specialized. */
static inline void gen_rldinm(DisasContext *ctx, uint32_t mb, uint32_t me,
                              uint32_t sh)
{
    if (likely(sh != 0 && mb == 0 && me == (63 - sh))) {
        /* sldi case */
        tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                        sh);
    } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
        /* srdi case */
        tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                        mb);
    } else {
        TCGv t0 = tcg_temp_new();
        tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
        if (likely(mb == 0 && me == 63)) {
            tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
        } else {
            tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
        }
        tcg_temp_free(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
1750
/* rldicl - rldicl. */
1751
static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
1755
sh = SH(ctx->opcode) | (shn << 5);
1756
mb = MB(ctx->opcode) | (mbn << 5);
1757
gen_rldinm(ctx, mb, 63, sh);
1759
GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1760
/* rldicr - rldicr. */
1761
static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
1765
sh = SH(ctx->opcode) | (shn << 5);
1766
me = MB(ctx->opcode) | (men << 5);
1767
gen_rldinm(ctx, 0, me, sh);
1769
GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1770
/* rldic - rldic. */
1771
static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
1775
sh = SH(ctx->opcode) | (shn << 5);
1776
mb = MB(ctx->opcode) | (mbn << 5);
1777
gen_rldinm(ctx, mb, 63 - sh, sh);
1779
GEN_PPC64_R4(rldic, 0x1E, 0x04);
1781
/* Common body for rldcl/rldcr: rotate left doubleword by the register
 * amount in rB, then AND with MASK(mb, me).
 *
 * Fix: the previous code re-read mb/me from the MB/ME opcode fields here,
 * clobbering the values the callers had already computed.  That discarded
 * the extended (bit 5) part of the mask field passed in by rldcl/rldcr,
 * producing a wrong mask for mb/me >= 32.  Use the parameters as given. */
static inline void gen_rldnm(DisasContext *ctx, uint32_t mb, uint32_t me)
{
    TCGv t0;

    t0 = tcg_temp_new();
    /* Rotate count is rB modulo 64 */
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    if (unlikely(mb != 0 || me != 63)) {
        tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
    } else {
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    }
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
1800
/* rldcl - rldcl. */
1801
static inline void gen_rldcl(DisasContext *ctx, int mbn)
1805
mb = MB(ctx->opcode) | (mbn << 5);
1806
gen_rldnm(ctx, mb, 63);
1808
GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1809
/* rldcr - rldcr. */
1810
static inline void gen_rldcr(DisasContext *ctx, int men)
1814
me = MB(ctx->opcode) | (men << 5);
1815
gen_rldnm(ctx, 0, me);
1817
GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1818
/* rldimi - rldimi. */
1819
static inline void gen_rldimi(DisasContext *ctx, int mbn, int shn)
1821
uint32_t sh, mb, me;
1823
sh = SH(ctx->opcode) | (shn << 5);
1824
mb = MB(ctx->opcode) | (mbn << 5);
1826
if (unlikely(sh == 0 && mb == 0)) {
1827
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1832
t0 = tcg_temp_new();
1833
tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1834
t1 = tcg_temp_new();
1835
mask = MASK(mb, me);
1836
tcg_gen_andi_tl(t0, t0, mask);
1837
tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1838
tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1842
if (unlikely(Rc(ctx->opcode) != 0))
1843
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1845
GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1848
/*** Integer shift ***/
1851
static void gen_slw(DisasContext *ctx)
1855
t0 = tcg_temp_new();
1856
/* AND rS with a mask that is 0 when rB >= 0x20 */
1857
#if defined(TARGET_PPC64)
1858
tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1859
tcg_gen_sari_tl(t0, t0, 0x3f);
1861
tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1862
tcg_gen_sari_tl(t0, t0, 0x1f);
1864
tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1865
t1 = tcg_temp_new();
1866
tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1867
tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1870
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
1871
if (unlikely(Rc(ctx->opcode) != 0))
1872
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1876
/* sraw & sraw. — shift right algebraic word; CA handling is done in the
 * helper. */
static void gen_sraw(DisasContext *ctx)
{
    gen_helper_sraw(cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
1884
/* srawi & srawi. */
1885
static void gen_srawi(DisasContext *ctx)
1887
int sh = SH(ctx->opcode);
1891
l1 = gen_new_label();
1892
l2 = gen_new_label();
1893
t0 = tcg_temp_local_new();
1894
tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1895
tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
1896
tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
1897
tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
1898
tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
1901
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1903
tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1904
tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], t0, sh);
1907
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1908
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1910
if (unlikely(Rc(ctx->opcode) != 0))
1911
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1915
/* srw & srw. — shift right word; result is 0 when the shift count in rB
 * has bit 26 set (count >= 32). */
static void gen_srw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_ext32u_tl(t0, t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
1939
#if defined(TARGET_PPC64)
/* sld & sld. — shift left doubleword; result is 0 for counts >= 64. */
static void gen_sld(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srad & srad. — shift right algebraic doubleword (helper handles CA). */
static void gen_srad(DisasContext *ctx)
{
    gen_helper_srad(cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sradi & sradi. — shift right algebraic doubleword immediate;
 * n supplies the high bit of the 6-bit shift count. */
static inline void gen_sradi(DisasContext *ctx, int n)
{
    int sh = SH(ctx->opcode) + (n << 5);
    if (sh != 0) {
        int l1, l2;
        TCGv t0;
        l1 = gen_new_label();
        l2 = gen_new_label();
        t0 = tcg_temp_local_new();
        /* CA is set iff the source is negative and bits shift out */
        tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
        tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
        tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
        tcg_gen_br(l2);
        gen_set_label(l1);
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
        gen_set_label(l2);
        tcg_temp_free(t0);
        tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                        sh);
    } else {
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

static void gen_sradi0(DisasContext *ctx)
{
    gen_sradi(ctx, 0);
}

static void gen_sradi1(DisasContext *ctx)
{
    gen_sradi(ctx, 1);
}

/* srd & srd. — shift right doubleword; result is 0 for counts >= 64. */
static void gen_srd(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
#endif
2025
/*** Floating-Point arithmetic ***/
/* Three-operand FP op (A, C, B).  isfloat selects a final round to single
 * precision; set_fprf selects FPSCR.FPRF computation. */
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type)           \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],      \
                     cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);     \
    if (isfloat) {                                                            \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);  \
    }                                                                         \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], set_fprf,                      \
                     Rc(ctx->opcode) != 0);                                   \
}

#define GEN_FLOAT_ACB(name, op2, set_fprf, type)                              \
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type);                     \
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);

/* Two-operand FP op (A, B). */
#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type)     \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],      \
                     cpu_fpr[rB(ctx->opcode)]);                               \
    if (isfloat) {                                                            \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);  \
    }                                                                         \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)],                                \
                     set_fprf, Rc(ctx->opcode) != 0);                         \
}
#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type)                        \
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type);               \
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

/* Two-operand FP op (A, C). */
#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type)     \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],      \
                     cpu_fpr[rC(ctx->opcode)]);                               \
    if (isfloat) {                                                            \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);  \
    }                                                                         \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)],                                \
                     set_fprf, Rc(ctx->opcode) != 0);                         \
}
#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type)                        \
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type);               \
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

/* One-operand FP op (B only). */
#define GEN_FLOAT_B(name, op2, op3, set_fprf, type)                           \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);   \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)],                                \
                     set_fprf, Rc(ctx->opcode) != 0);                         \
}

/* One-operand FP op with a distinct primary opcode (fre/fres/frsqrte). */
#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type)                          \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);   \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)],                                \
                     set_fprf, Rc(ctx->opcode) != 0);                         \
}
2124
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
2126
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
2128
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
2131
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
2134
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
2137
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
2140
static void gen_frsqrtes(DisasContext *ctx)
2142
if (unlikely(!ctx->fpu_enabled)) {
2143
gen_exception(ctx, POWERPC_EXCP_FPU);
2146
/* NIP cannot be restored if the memory exception comes from an helper */
2147
gen_update_nip(ctx, ctx->nip - 4);
2148
gen_reset_fpstatus();
2149
gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2150
gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2151
gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2155
_GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
2157
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
2161
static void gen_fsqrt(DisasContext *ctx)
2163
if (unlikely(!ctx->fpu_enabled)) {
2164
gen_exception(ctx, POWERPC_EXCP_FPU);
2167
/* NIP cannot be restored if the memory exception comes from an helper */
2168
gen_update_nip(ctx, ctx->nip - 4);
2169
gen_reset_fpstatus();
2170
gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2171
gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2174
static void gen_fsqrts(DisasContext *ctx)
2176
if (unlikely(!ctx->fpu_enabled)) {
2177
gen_exception(ctx, POWERPC_EXCP_FPU);
2180
/* NIP cannot be restored if the memory exception comes from an helper */
2181
gen_update_nip(ctx, ctx->nip - 4);
2182
gen_reset_fpstatus();
2183
gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2184
gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2185
gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2188
/*** Floating-Point multiply-and-add ***/
2189
/* fmadd - fmadds */
2190
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
2191
/* fmsub - fmsubs */
2192
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
2193
/* fnmadd - fnmadds */
2194
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
2195
/* fnmsub - fnmsubs */
2196
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
2198
/*** Floating-Point round & convert ***/
2200
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
2202
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
2204
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
2205
#if defined(TARGET_PPC64)
2207
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
2209
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
2211
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
2215
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
2217
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
2219
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
2221
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
2223
/*** Floating-Point compare ***/
2226
static void gen_fcmpo(DisasContext *ctx)
2229
if (unlikely(!ctx->fpu_enabled)) {
2230
gen_exception(ctx, POWERPC_EXCP_FPU);
2233
/* NIP cannot be restored if the memory exception comes from an helper */
2234
gen_update_nip(ctx, ctx->nip - 4);
2235
gen_reset_fpstatus();
2236
crf = tcg_const_i32(crfD(ctx->opcode));
2237
gen_helper_fcmpo(cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)], crf);
2238
tcg_temp_free_i32(crf);
2239
gen_helper_float_check_status();
2243
static void gen_fcmpu(DisasContext *ctx)
2246
if (unlikely(!ctx->fpu_enabled)) {
2247
gen_exception(ctx, POWERPC_EXCP_FPU);
2250
/* NIP cannot be restored if the memory exception comes from an helper */
2251
gen_update_nip(ctx, ctx->nip - 4);
2252
gen_reset_fpstatus();
2253
crf = tcg_const_i32(crfD(ctx->opcode));
2254
gen_helper_fcmpu(cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)], crf);
2255
tcg_temp_free_i32(crf);
2256
gen_helper_float_check_status();
2259
/*** Floating-point move ***/
2261
/* XXX: beware that fabs never checks for NaNs nor update FPSCR */
2262
GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT);
2265
/* XXX: beware that fmr never checks for NaNs nor update FPSCR */
2266
static void gen_fmr(DisasContext *ctx)
2268
if (unlikely(!ctx->fpu_enabled)) {
2269
gen_exception(ctx, POWERPC_EXCP_FPU);
2272
tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2273
gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2277
/* XXX: beware that fnabs never checks for NaNs nor update FPSCR */
2278
GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT);
2280
/* XXX: beware that fneg never checks for NaNs nor update FPSCR */
2281
GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT);
2283
/*** Floating-Point status & ctrl register ***/
2286
static void gen_mcrfs(DisasContext *ctx)
2290
if (unlikely(!ctx->fpu_enabled)) {
2291
gen_exception(ctx, POWERPC_EXCP_FPU);
2294
bfa = 4 * (7 - crfS(ctx->opcode));
2295
tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_fpscr, bfa);
2296
tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
2297
tcg_gen_andi_i32(cpu_fpscr, cpu_fpscr, ~(0xF << bfa));
2301
static void gen_mffs(DisasContext *ctx)
2303
if (unlikely(!ctx->fpu_enabled)) {
2304
gen_exception(ctx, POWERPC_EXCP_FPU);
2307
gen_reset_fpstatus();
2308
tcg_gen_extu_i32_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
2309
gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2313
static void gen_mtfsb0(DisasContext *ctx)
2317
if (unlikely(!ctx->fpu_enabled)) {
2318
gen_exception(ctx, POWERPC_EXCP_FPU);
2321
crb = 31 - crbD(ctx->opcode);
2322
gen_reset_fpstatus();
2323
if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
2325
/* NIP cannot be restored if the memory exception comes from an helper */
2326
gen_update_nip(ctx, ctx->nip - 4);
2327
t0 = tcg_const_i32(crb);
2328
gen_helper_fpscr_clrbit(t0);
2329
tcg_temp_free_i32(t0);
2331
if (unlikely(Rc(ctx->opcode) != 0)) {
2332
tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2337
static void gen_mtfsb1(DisasContext *ctx)
2341
if (unlikely(!ctx->fpu_enabled)) {
2342
gen_exception(ctx, POWERPC_EXCP_FPU);
2345
crb = 31 - crbD(ctx->opcode);
2346
gen_reset_fpstatus();
2347
/* XXX: we pretend we can only do IEEE floating-point computations */
2348
if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
2350
/* NIP cannot be restored if the memory exception comes from an helper */
2351
gen_update_nip(ctx, ctx->nip - 4);
2352
t0 = tcg_const_i32(crb);
2353
gen_helper_fpscr_setbit(t0);
2354
tcg_temp_free_i32(t0);
2356
if (unlikely(Rc(ctx->opcode) != 0)) {
2357
tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2359
/* We can raise a differed exception */
2360
gen_helper_float_check_status();
2364
/* mtfsf — store fields of an FPR into FPSCR; L=1 writes all fields. */
static void gen_mtfsf(DisasContext *ctx)
{
    TCGv_i32 t0;
    int L = ctx->opcode & 0x02000000;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    if (L)
        t0 = tcg_const_i32(0xff);
    else
        t0 = tcg_const_i32(FM(ctx->opcode));
    gen_helper_store_fpscr(cpu_fpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
    }
    /* We can raise a differed exception */
    gen_helper_float_check_status();
}
2390
/* mtfsfi — store a 4-bit immediate into one FPSCR field. */
static void gen_mtfsfi(DisasContext *ctx)
{
    int bf, sh;
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    bf = crbD(ctx->opcode) >> 2;
    sh = 7 - bf;
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    t0 = tcg_const_i64(FPIMM(ctx->opcode) << (4 * sh));
    t1 = tcg_const_i32(1 << sh);
    gen_helper_store_fpscr(t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
    }
    /* We can raise a differed exception */
    gen_helper_float_check_status();
}
2417
/*** Addressing modes ***/
2418
/* Register indirect with immediate index : EA = (rA|0) + SIMM */
2419
static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2422
target_long simm = SIMM(ctx->opcode);
2425
if (rA(ctx->opcode) == 0) {
2426
#if defined(TARGET_PPC64)
2427
if (!ctx->sf_mode) {
2428
tcg_gen_movi_tl(EA, (uint32_t)simm);
2431
tcg_gen_movi_tl(EA, simm);
2432
} else if (likely(simm != 0)) {
2433
tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2434
#if defined(TARGET_PPC64)
2435
if (!ctx->sf_mode) {
2436
tcg_gen_ext32u_tl(EA, EA);
2440
#if defined(TARGET_PPC64)
2441
if (!ctx->sf_mode) {
2442
tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2445
tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2449
static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
2451
if (rA(ctx->opcode) == 0) {
2452
#if defined(TARGET_PPC64)
2453
if (!ctx->sf_mode) {
2454
tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2457
tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2459
tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2460
#if defined(TARGET_PPC64)
2461
if (!ctx->sf_mode) {
2462
tcg_gen_ext32u_tl(EA, EA);
2468
static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
2470
if (rA(ctx->opcode) == 0) {
2471
tcg_gen_movi_tl(EA, 0);
2473
#if defined(TARGET_PPC64)
2474
if (!ctx->sf_mode) {
2475
tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2478
tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2482
static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
2485
tcg_gen_addi_tl(ret, arg1, val);
2486
#if defined(TARGET_PPC64)
2487
if (!ctx->sf_mode) {
2488
tcg_gen_ext32u_tl(ret, ret);
2493
static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask)
2495
int l1 = gen_new_label();
2496
TCGv t0 = tcg_temp_new();
2498
/* NIP cannot be restored if the memory exception comes from an helper */
2499
gen_update_nip(ctx, ctx->nip - 4);
2500
tcg_gen_andi_tl(t0, EA, mask);
2501
tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2502
t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
2503
t2 = tcg_const_i32(0);
2504
gen_helper_raise_exception_err(t1, t2);
2505
tcg_temp_free_i32(t1);
2506
tcg_temp_free_i32(t2);
2511
/*** Integer load ***/
2512
static inline void gen_qemu_ld8u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2514
tcg_gen_qemu_ld8u(arg1, arg2, ctx->mem_idx);
2517
static inline void gen_qemu_ld8s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2519
tcg_gen_qemu_ld8s(arg1, arg2, ctx->mem_idx);
2522
static inline void gen_qemu_ld16u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2524
tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2525
if (unlikely(ctx->le_mode)) {
2526
tcg_gen_bswap16_tl(arg1, arg1);
2530
static inline void gen_qemu_ld16s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2532
if (unlikely(ctx->le_mode)) {
2533
tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2534
tcg_gen_bswap16_tl(arg1, arg1);
2535
tcg_gen_ext16s_tl(arg1, arg1);
2537
tcg_gen_qemu_ld16s(arg1, arg2, ctx->mem_idx);
2541
static inline void gen_qemu_ld32u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2543
tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2544
if (unlikely(ctx->le_mode)) {
2545
tcg_gen_bswap32_tl(arg1, arg1);
2549
#if defined(TARGET_PPC64)
2550
static inline void gen_qemu_ld32s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2552
if (unlikely(ctx->le_mode)) {
2553
tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2554
tcg_gen_bswap32_tl(arg1, arg1);
2555
tcg_gen_ext32s_tl(arg1, arg1);
2557
tcg_gen_qemu_ld32s(arg1, arg2, ctx->mem_idx);
2561
static inline void gen_qemu_ld64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2563
tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
2564
if (unlikely(ctx->le_mode)) {
2565
tcg_gen_bswap64_i64(arg1, arg1);
2569
static inline void gen_qemu_st8(DisasContext *ctx, TCGv arg1, TCGv arg2)
2571
tcg_gen_qemu_st8(arg1, arg2, ctx->mem_idx);
2574
static inline void gen_qemu_st16(DisasContext *ctx, TCGv arg1, TCGv arg2)
2576
if (unlikely(ctx->le_mode)) {
2577
TCGv t0 = tcg_temp_new();
2578
tcg_gen_ext16u_tl(t0, arg1);
2579
tcg_gen_bswap16_tl(t0, t0);
2580
tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2583
tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2587
/* Emit TCG ops to store the low word of arg1 at the address in arg2.
 * In little-endian mode the word is masked and byte swapped in a
 * temporary first, so arg1 itself is left unmodified. */
static inline void gen_qemu_st32(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    if (unlikely(ctx->le_mode)) {
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext32u_tl(t0, arg1);
        tcg_gen_bswap32_tl(t0, t0);
        tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
        tcg_temp_free(t0);
    } else {
        tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
    }
}
/* Emit TCG ops to store the 64-bit value arg1 at the address in arg2.
 * In little-endian mode the value is byte swapped in a temporary first,
 * so arg1 itself is left unmodified. */
static inline void gen_qemu_st64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    if (unlikely(ctx->le_mode)) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_bswap64_i64(t0, arg1);
        tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
        tcg_temp_free_i64(t0);
    } else {
        tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx);
    }
}
#define GEN_LD(name, ldop, opc, type) \
2612
static void glue(gen_, name)(DisasContext *ctx) \
2615
gen_set_access_type(ctx, ACCESS_INT); \
2616
EA = tcg_temp_new(); \
2617
gen_addr_imm_index(ctx, EA, 0); \
2618
gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2619
tcg_temp_free(EA); \
2622
#define GEN_LDU(name, ldop, opc, type) \
2623
static void glue(gen_, name##u)(DisasContext *ctx) \
2626
if (unlikely(rA(ctx->opcode) == 0 || \
2627
rA(ctx->opcode) == rD(ctx->opcode))) { \
2628
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2631
gen_set_access_type(ctx, ACCESS_INT); \
2632
EA = tcg_temp_new(); \
2633
if (type == PPC_64B) \
2634
gen_addr_imm_index(ctx, EA, 0x03); \
2636
gen_addr_imm_index(ctx, EA, 0); \
2637
gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2638
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2639
tcg_temp_free(EA); \
2642
#define GEN_LDUX(name, ldop, opc2, opc3, type) \
2643
static void glue(gen_, name##ux)(DisasContext *ctx) \
2646
if (unlikely(rA(ctx->opcode) == 0 || \
2647
rA(ctx->opcode) == rD(ctx->opcode))) { \
2648
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2651
gen_set_access_type(ctx, ACCESS_INT); \
2652
EA = tcg_temp_new(); \
2653
gen_addr_reg_index(ctx, EA); \
2654
gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2655
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2656
tcg_temp_free(EA); \
2659
#define GEN_LDX(name, ldop, opc2, opc3, type) \
2660
static void glue(gen_, name##x)(DisasContext *ctx) \
2663
gen_set_access_type(ctx, ACCESS_INT); \
2664
EA = tcg_temp_new(); \
2665
gen_addr_reg_index(ctx, EA); \
2666
gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2667
tcg_temp_free(EA); \
2670
#define GEN_LDS(name, ldop, op, type) \
2671
GEN_LD(name, ldop, op | 0x20, type); \
2672
GEN_LDU(name, ldop, op | 0x21, type); \
2673
GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
2674
GEN_LDX(name, ldop, 0x17, op | 0x00, type)
2676
/* lbz lbzu lbzux lbzx */
2677
GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
2678
/* lha lhau lhaux lhax */
2679
GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
2680
/* lhz lhzu lhzux lhzx */
2681
GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
2682
/* lwz lwzu lwzux lwzx */
2683
GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
2684
#if defined(TARGET_PPC64)
2686
GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
2688
GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
2690
GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B);
2692
GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B);
2694
static void gen_ld(DisasContext *ctx)
2697
if (Rc(ctx->opcode)) {
2698
if (unlikely(rA(ctx->opcode) == 0 ||
2699
rA(ctx->opcode) == rD(ctx->opcode))) {
2700
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2704
gen_set_access_type(ctx, ACCESS_INT);
2705
EA = tcg_temp_new();
2706
gen_addr_imm_index(ctx, EA, 0x03);
2707
if (ctx->opcode & 0x02) {
2708
/* lwa (lwau is undefined) */
2709
gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2712
gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2714
if (Rc(ctx->opcode))
2715
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2720
static void gen_lq(DisasContext *ctx)
2722
#if defined(CONFIG_USER_ONLY)
2723
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2728
/* Restore CPU state */
2729
if (unlikely(ctx->mem_idx == 0)) {
2730
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2733
ra = rA(ctx->opcode);
2734
rd = rD(ctx->opcode);
2735
if (unlikely((rd & 1) || rd == ra)) {
2736
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2739
if (unlikely(ctx->le_mode)) {
2740
/* Little-endian mode is not handled */
2741
gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2744
gen_set_access_type(ctx, ACCESS_INT);
2745
EA = tcg_temp_new();
2746
gen_addr_imm_index(ctx, EA, 0x0F);
2747
gen_qemu_ld64(ctx, cpu_gpr[rd], EA);
2748
gen_addr_add(ctx, EA, EA, 8);
2749
gen_qemu_ld64(ctx, cpu_gpr[rd+1], EA);
2755
/*** Integer store ***/
2756
#define GEN_ST(name, stop, opc, type) \
2757
static void glue(gen_, name)(DisasContext *ctx) \
2760
gen_set_access_type(ctx, ACCESS_INT); \
2761
EA = tcg_temp_new(); \
2762
gen_addr_imm_index(ctx, EA, 0); \
2763
gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2764
tcg_temp_free(EA); \
2767
#define GEN_STU(name, stop, opc, type) \
2768
static void glue(gen_, stop##u)(DisasContext *ctx) \
2771
if (unlikely(rA(ctx->opcode) == 0)) { \
2772
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2775
gen_set_access_type(ctx, ACCESS_INT); \
2776
EA = tcg_temp_new(); \
2777
if (type == PPC_64B) \
2778
gen_addr_imm_index(ctx, EA, 0x03); \
2780
gen_addr_imm_index(ctx, EA, 0); \
2781
gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2782
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2783
tcg_temp_free(EA); \
2786
#define GEN_STUX(name, stop, opc2, opc3, type) \
2787
static void glue(gen_, name##ux)(DisasContext *ctx) \
2790
if (unlikely(rA(ctx->opcode) == 0)) { \
2791
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2794
gen_set_access_type(ctx, ACCESS_INT); \
2795
EA = tcg_temp_new(); \
2796
gen_addr_reg_index(ctx, EA); \
2797
gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2798
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2799
tcg_temp_free(EA); \
2802
#define GEN_STX(name, stop, opc2, opc3, type) \
2803
static void glue(gen_, name##x)(DisasContext *ctx) \
2806
gen_set_access_type(ctx, ACCESS_INT); \
2807
EA = tcg_temp_new(); \
2808
gen_addr_reg_index(ctx, EA); \
2809
gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2810
tcg_temp_free(EA); \
2813
#define GEN_STS(name, stop, op, type) \
2814
GEN_ST(name, stop, op | 0x20, type); \
2815
GEN_STU(name, stop, op | 0x21, type); \
2816
GEN_STUX(name, stop, 0x17, op | 0x01, type); \
2817
GEN_STX(name, stop, 0x17, op | 0x00, type)
2819
/* stb stbu stbux stbx */
2820
GEN_STS(stb, st8, 0x06, PPC_INTEGER);
2821
/* sth sthu sthux sthx */
2822
GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
2823
/* stw stwu stwux stwx */
2824
GEN_STS(stw, st32, 0x04, PPC_INTEGER);
2825
#if defined(TARGET_PPC64)
2826
GEN_STUX(std, st64, 0x15, 0x05, PPC_64B);
2827
GEN_STX(std, st64, 0x15, 0x04, PPC_64B);
2829
static void gen_std(DisasContext *ctx)
2834
rs = rS(ctx->opcode);
2835
if ((ctx->opcode & 0x3) == 0x2) {
2836
#if defined(CONFIG_USER_ONLY)
2837
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2840
if (unlikely(ctx->mem_idx == 0)) {
2841
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2844
if (unlikely(rs & 1)) {
2845
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2848
if (unlikely(ctx->le_mode)) {
2849
/* Little-endian mode is not handled */
2850
gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2853
gen_set_access_type(ctx, ACCESS_INT);
2854
EA = tcg_temp_new();
2855
gen_addr_imm_index(ctx, EA, 0x03);
2856
gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2857
gen_addr_add(ctx, EA, EA, 8);
2858
gen_qemu_st64(ctx, cpu_gpr[rs+1], EA);
2863
if (Rc(ctx->opcode)) {
2864
if (unlikely(rA(ctx->opcode) == 0)) {
2865
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2869
gen_set_access_type(ctx, ACCESS_INT);
2870
EA = tcg_temp_new();
2871
gen_addr_imm_index(ctx, EA, 0x03);
2872
gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2873
if (Rc(ctx->opcode))
2874
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2879
/*** Integer load and store with byte reverse ***/
2881
/* Emit TCG ops for a byte-reversed unsigned halfword load (lhbrx): the
 * value is swapped in big-endian mode, i.e. exactly the opposite of the
 * normal gen_qemu_ld16u helper. */
static inline void gen_qemu_ld16ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
    if (likely(!ctx->le_mode)) {
        tcg_gen_bswap16_tl(arg1, arg1);
    }
}
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
2891
/* Emit TCG ops for a byte-reversed unsigned word load (lwbrx): the value
 * is swapped in big-endian mode, the opposite of gen_qemu_ld32u. */
static inline void gen_qemu_ld32ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
    if (likely(!ctx->le_mode)) {
        tcg_gen_bswap32_tl(arg1, arg1);
    }
}
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
2901
/* Emit TCG ops for a byte-reversed halfword store (sthbrx): the value is
 * swapped in big-endian mode, the opposite of gen_qemu_st16.  A temporary
 * is used so arg1 is left unmodified. */
static inline void gen_qemu_st16r(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    if (likely(!ctx->le_mode)) {
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext16u_tl(t0, arg1);
        tcg_gen_bswap16_tl(t0, t0);
        tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
        tcg_temp_free(t0);
    } else {
        tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
    }
}
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
2916
/* Emit TCG ops for a byte-reversed word store (stwbrx): the value is
 * swapped in big-endian mode, the opposite of gen_qemu_st32.  A temporary
 * is used so arg1 is left unmodified. */
static inline void gen_qemu_st32r(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    if (likely(!ctx->le_mode)) {
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext32u_tl(t0, arg1);
        tcg_gen_bswap32_tl(t0, t0);
        tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
        tcg_temp_free(t0);
    } else {
        tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
    }
}
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
2930
/*** Integer load and store multiple ***/
2933
/* lmw: load multiple words, GPRs rD..31, via the lmw helper. */
static void gen_lmw(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;
    gen_set_access_type(ctx, ACCESS_INT);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rD(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_lmw(t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
/* stmw: store multiple words, GPRs rS..31, via the stmw helper. */
static void gen_stmw(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;
    gen_set_access_type(ctx, ACCESS_INT);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rS(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_stmw(t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
/*** Integer load and store strings ***/
2967
/* PowerPC32 specification says we must generate an exception if
2968
* rA is in the range of registers to be loaded.
2969
* In an other hand, IBM says this is valid, but rA won't be loaded.
2970
* For now, I'll follow the spec...
2972
static void gen_lswi(DisasContext *ctx)
2976
int nb = NB(ctx->opcode);
2977
int start = rD(ctx->opcode);
2978
int ra = rA(ctx->opcode);
2984
if (unlikely(((start + nr) > 32 &&
2985
start <= ra && (start + nr - 32) > ra) ||
2986
((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
2987
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
2990
gen_set_access_type(ctx, ACCESS_INT);
2991
/* NIP cannot be restored if the memory exception comes from an helper */
2992
gen_update_nip(ctx, ctx->nip - 4);
2993
t0 = tcg_temp_new();
2994
gen_addr_register(ctx, t0);
2995
t1 = tcg_const_i32(nb);
2996
t2 = tcg_const_i32(start);
2997
gen_helper_lsw(t0, t1, t2);
2999
tcg_temp_free_i32(t1);
3000
tcg_temp_free_i32(t2);
3004
static void gen_lswx(DisasContext *ctx)
3007
TCGv_i32 t1, t2, t3;
3008
gen_set_access_type(ctx, ACCESS_INT);
3009
/* NIP cannot be restored if the memory exception comes from an helper */
3010
gen_update_nip(ctx, ctx->nip - 4);
3011
t0 = tcg_temp_new();
3012
gen_addr_reg_index(ctx, t0);
3013
t1 = tcg_const_i32(rD(ctx->opcode));
3014
t2 = tcg_const_i32(rA(ctx->opcode));
3015
t3 = tcg_const_i32(rB(ctx->opcode));
3016
gen_helper_lswx(t0, t1, t2, t3);
3018
tcg_temp_free_i32(t1);
3019
tcg_temp_free_i32(t2);
3020
tcg_temp_free_i32(t3);
3024
static void gen_stswi(DisasContext *ctx)
3028
int nb = NB(ctx->opcode);
3029
gen_set_access_type(ctx, ACCESS_INT);
3030
/* NIP cannot be restored if the memory exception comes from an helper */
3031
gen_update_nip(ctx, ctx->nip - 4);
3032
t0 = tcg_temp_new();
3033
gen_addr_register(ctx, t0);
3036
t1 = tcg_const_i32(nb);
3037
t2 = tcg_const_i32(rS(ctx->opcode));
3038
gen_helper_stsw(t0, t1, t2);
3040
tcg_temp_free_i32(t1);
3041
tcg_temp_free_i32(t2);
3045
static void gen_stswx(DisasContext *ctx)
3049
gen_set_access_type(ctx, ACCESS_INT);
3050
/* NIP cannot be restored if the memory exception comes from an helper */
3051
gen_update_nip(ctx, ctx->nip - 4);
3052
t0 = tcg_temp_new();
3053
gen_addr_reg_index(ctx, t0);
3054
t1 = tcg_temp_new_i32();
3055
tcg_gen_trunc_tl_i32(t1, cpu_xer);
3056
tcg_gen_andi_i32(t1, t1, 0x7F);
3057
t2 = tcg_const_i32(rS(ctx->opcode));
3058
gen_helper_stsw(t0, t1, t2);
3060
tcg_temp_free_i32(t1);
3061
tcg_temp_free_i32(t2);
3064
/*** Memory synchronisation ***/
3066
static void gen_eieio(DisasContext *ctx)
3071
static void gen_isync(DisasContext *ctx)
3073
gen_stop_exception(ctx);
3077
static void gen_lwarx(DisasContext *ctx)
3080
TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3081
gen_set_access_type(ctx, ACCESS_RES);
3082
t0 = tcg_temp_local_new();
3083
gen_addr_reg_index(ctx, t0);
3084
gen_check_align(ctx, t0, 0x03);
3085
gen_qemu_ld32u(ctx, gpr, t0);
3086
tcg_gen_mov_tl(cpu_reserve, t0);
3087
tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUState, reserve_val));
3091
#if defined(CONFIG_USER_ONLY)
3092
static void gen_conditional_store (DisasContext *ctx, TCGv EA,
3095
TCGv t0 = tcg_temp_new();
3096
uint32_t save_exception = ctx->exception;
3098
tcg_gen_st_tl(EA, cpu_env, offsetof(CPUState, reserve_ea));
3099
tcg_gen_movi_tl(t0, (size << 5) | reg);
3100
tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, reserve_info));
3102
gen_update_nip(ctx, ctx->nip-4);
3103
ctx->exception = POWERPC_EXCP_BRANCH;
3104
gen_exception(ctx, POWERPC_EXCP_STCX);
3105
ctx->exception = save_exception;
3110
static void gen_stwcx_(DisasContext *ctx)
3113
gen_set_access_type(ctx, ACCESS_RES);
3114
t0 = tcg_temp_local_new();
3115
gen_addr_reg_index(ctx, t0);
3116
gen_check_align(ctx, t0, 0x03);
3117
#if defined(CONFIG_USER_ONLY)
3118
gen_conditional_store(ctx, t0, rS(ctx->opcode), 4);
3123
tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
3124
tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
3125
tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
3126
l1 = gen_new_label();
3127
tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3128
tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3129
gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3131
tcg_gen_movi_tl(cpu_reserve, -1);
3137
#if defined(TARGET_PPC64)
3139
static void gen_ldarx(DisasContext *ctx)
3142
TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3143
gen_set_access_type(ctx, ACCESS_RES);
3144
t0 = tcg_temp_local_new();
3145
gen_addr_reg_index(ctx, t0);
3146
gen_check_align(ctx, t0, 0x07);
3147
gen_qemu_ld64(ctx, gpr, t0);
3148
tcg_gen_mov_tl(cpu_reserve, t0);
3149
tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUState, reserve_val));
3154
static void gen_stdcx_(DisasContext *ctx)
3157
gen_set_access_type(ctx, ACCESS_RES);
3158
t0 = tcg_temp_local_new();
3159
gen_addr_reg_index(ctx, t0);
3160
gen_check_align(ctx, t0, 0x07);
3161
#if defined(CONFIG_USER_ONLY)
3162
gen_conditional_store(ctx, t0, rS(ctx->opcode), 8);
3166
tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
3167
tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
3168
tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
3169
l1 = gen_new_label();
3170
tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3171
tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3172
gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3174
tcg_gen_movi_tl(cpu_reserve, -1);
3179
#endif /* defined(TARGET_PPC64) */
3182
static void gen_sync(DisasContext *ctx)
3187
static void gen_wait(DisasContext *ctx)
3189
TCGv_i32 t0 = tcg_temp_new_i32();
3190
tcg_gen_st_i32(t0, cpu_env, offsetof(CPUState, halted));
3191
tcg_temp_free_i32(t0);
3192
/* Stop translation, as the CPU is supposed to sleep from now */
3193
gen_exception_err(ctx, EXCP_HLT, 1);
3196
/*** Floating-point load ***/
3197
#define GEN_LDF(name, ldop, opc, type) \
3198
static void glue(gen_, name)(DisasContext *ctx) \
3201
if (unlikely(!ctx->fpu_enabled)) { \
3202
gen_exception(ctx, POWERPC_EXCP_FPU); \
3205
gen_set_access_type(ctx, ACCESS_FLOAT); \
3206
EA = tcg_temp_new(); \
3207
gen_addr_imm_index(ctx, EA, 0); \
3208
gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3209
tcg_temp_free(EA); \
3212
#define GEN_LDUF(name, ldop, opc, type) \
3213
static void glue(gen_, name##u)(DisasContext *ctx) \
3216
if (unlikely(!ctx->fpu_enabled)) { \
3217
gen_exception(ctx, POWERPC_EXCP_FPU); \
3220
if (unlikely(rA(ctx->opcode) == 0)) { \
3221
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3224
gen_set_access_type(ctx, ACCESS_FLOAT); \
3225
EA = tcg_temp_new(); \
3226
gen_addr_imm_index(ctx, EA, 0); \
3227
gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3228
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3229
tcg_temp_free(EA); \
3232
#define GEN_LDUXF(name, ldop, opc, type) \
3233
static void glue(gen_, name##ux)(DisasContext *ctx) \
3236
if (unlikely(!ctx->fpu_enabled)) { \
3237
gen_exception(ctx, POWERPC_EXCP_FPU); \
3240
if (unlikely(rA(ctx->opcode) == 0)) { \
3241
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3244
gen_set_access_type(ctx, ACCESS_FLOAT); \
3245
EA = tcg_temp_new(); \
3246
gen_addr_reg_index(ctx, EA); \
3247
gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3248
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3249
tcg_temp_free(EA); \
3252
#define GEN_LDXF(name, ldop, opc2, opc3, type) \
3253
static void glue(gen_, name##x)(DisasContext *ctx) \
3256
if (unlikely(!ctx->fpu_enabled)) { \
3257
gen_exception(ctx, POWERPC_EXCP_FPU); \
3260
gen_set_access_type(ctx, ACCESS_FLOAT); \
3261
EA = tcg_temp_new(); \
3262
gen_addr_reg_index(ctx, EA); \
3263
gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3264
tcg_temp_free(EA); \
3267
#define GEN_LDFS(name, ldop, op, type) \
3268
GEN_LDF(name, ldop, op | 0x20, type); \
3269
GEN_LDUF(name, ldop, op | 0x21, type); \
3270
GEN_LDUXF(name, ldop, op | 0x01, type); \
3271
GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
3273
static inline void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3275
TCGv t0 = tcg_temp_new();
3276
TCGv_i32 t1 = tcg_temp_new_i32();
3277
gen_qemu_ld32u(ctx, t0, arg2);
3278
tcg_gen_trunc_tl_i32(t1, t0);
3280
gen_helper_float32_to_float64(arg1, t1);
3281
tcg_temp_free_i32(t1);
3284
/* lfd lfdu lfdux lfdx */
3285
GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT);
3286
/* lfs lfsu lfsux lfsx */
3287
GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);
3289
/*** Floating-point store ***/
3290
#define GEN_STF(name, stop, opc, type) \
3291
static void glue(gen_, name)(DisasContext *ctx) \
3294
if (unlikely(!ctx->fpu_enabled)) { \
3295
gen_exception(ctx, POWERPC_EXCP_FPU); \
3298
gen_set_access_type(ctx, ACCESS_FLOAT); \
3299
EA = tcg_temp_new(); \
3300
gen_addr_imm_index(ctx, EA, 0); \
3301
gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3302
tcg_temp_free(EA); \
3305
#define GEN_STUF(name, stop, opc, type) \
3306
static void glue(gen_, name##u)(DisasContext *ctx) \
3309
if (unlikely(!ctx->fpu_enabled)) { \
3310
gen_exception(ctx, POWERPC_EXCP_FPU); \
3313
if (unlikely(rA(ctx->opcode) == 0)) { \
3314
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3317
gen_set_access_type(ctx, ACCESS_FLOAT); \
3318
EA = tcg_temp_new(); \
3319
gen_addr_imm_index(ctx, EA, 0); \
3320
gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3321
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3322
tcg_temp_free(EA); \
3325
#define GEN_STUXF(name, stop, opc, type) \
3326
static void glue(gen_, name##ux)(DisasContext *ctx) \
3329
if (unlikely(!ctx->fpu_enabled)) { \
3330
gen_exception(ctx, POWERPC_EXCP_FPU); \
3333
if (unlikely(rA(ctx->opcode) == 0)) { \
3334
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3337
gen_set_access_type(ctx, ACCESS_FLOAT); \
3338
EA = tcg_temp_new(); \
3339
gen_addr_reg_index(ctx, EA); \
3340
gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3341
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3342
tcg_temp_free(EA); \
3345
#define GEN_STXF(name, stop, opc2, opc3, type) \
3346
static void glue(gen_, name##x)(DisasContext *ctx) \
3349
if (unlikely(!ctx->fpu_enabled)) { \
3350
gen_exception(ctx, POWERPC_EXCP_FPU); \
3353
gen_set_access_type(ctx, ACCESS_FLOAT); \
3354
EA = tcg_temp_new(); \
3355
gen_addr_reg_index(ctx, EA); \
3356
gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3357
tcg_temp_free(EA); \
3360
#define GEN_STFS(name, stop, op, type) \
3361
GEN_STF(name, stop, op | 0x20, type); \
3362
GEN_STUF(name, stop, op | 0x21, type); \
3363
GEN_STUXF(name, stop, op | 0x01, type); \
3364
GEN_STXF(name, stop, 0x17, op | 0x00, type)
3366
static inline void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3368
TCGv_i32 t0 = tcg_temp_new_i32();
3369
TCGv t1 = tcg_temp_new();
3370
gen_helper_float64_to_float32(t0, arg1);
3371
tcg_gen_extu_i32_tl(t1, t0);
3372
tcg_temp_free_i32(t0);
3373
gen_qemu_st32(ctx, t1, arg2);
3377
/* stfd stfdu stfdux stfdx */
3378
GEN_STFS(stfd, st64, 0x16, PPC_FLOAT);
3379
/* stfs stfsu stfsux stfsx */
3380
GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);
3383
static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3385
TCGv t0 = tcg_temp_new();
3386
tcg_gen_trunc_i64_tl(t0, arg1),
3387
gen_qemu_st32(ctx, t0, arg2);
3391
GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
3393
static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
3395
#if defined(TARGET_PPC64)
3397
tcg_gen_movi_tl(cpu_cfar, nip);
3402
static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
3404
TranslationBlock *tb;
3406
#if defined(TARGET_PPC64)
3408
dest = (uint32_t) dest;
3410
if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
3411
likely(!ctx->singlestep_enabled)) {
3413
tcg_gen_movi_tl(cpu_nip, dest & ~3);
3414
tcg_gen_exit_tb((tcg_target_long)tb + n);
3416
tcg_gen_movi_tl(cpu_nip, dest & ~3);
3417
if (unlikely(ctx->singlestep_enabled)) {
3418
if ((ctx->singlestep_enabled &
3419
(CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
3420
ctx->exception == POWERPC_EXCP_BRANCH) {
3421
target_ulong tmp = ctx->nip;
3423
gen_exception(ctx, POWERPC_EXCP_TRACE);
3426
if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3427
gen_debug_exception(ctx);
3434
static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
3436
#if defined(TARGET_PPC64)
3437
if (ctx->sf_mode == 0)
3438
tcg_gen_movi_tl(cpu_lr, (uint32_t)nip);
3441
tcg_gen_movi_tl(cpu_lr, nip);
3445
static void gen_b(DisasContext *ctx)
3447
target_ulong li, target;
3449
ctx->exception = POWERPC_EXCP_BRANCH;
3450
/* sign extend LI */
3451
#if defined(TARGET_PPC64)
3453
li = ((int64_t)LI(ctx->opcode) << 38) >> 38;
3456
li = ((int32_t)LI(ctx->opcode) << 6) >> 6;
3457
if (likely(AA(ctx->opcode) == 0))
3458
target = ctx->nip + li - 4;
3461
if (LK(ctx->opcode))
3462
gen_setlr(ctx, ctx->nip);
3463
gen_update_cfar(ctx, ctx->nip);
3464
gen_goto_tb(ctx, 0, target);
3471
static inline void gen_bcond(DisasContext *ctx, int type)
3473
uint32_t bo = BO(ctx->opcode);
3477
ctx->exception = POWERPC_EXCP_BRANCH;
3478
if (type == BCOND_LR || type == BCOND_CTR) {
3479
target = tcg_temp_local_new();
3480
if (type == BCOND_CTR)
3481
tcg_gen_mov_tl(target, cpu_ctr);
3483
tcg_gen_mov_tl(target, cpu_lr);
3485
TCGV_UNUSED(target);
3487
if (LK(ctx->opcode))
3488
gen_setlr(ctx, ctx->nip);
3489
l1 = gen_new_label();
3490
if ((bo & 0x4) == 0) {
3491
/* Decrement and test CTR */
3492
TCGv temp = tcg_temp_new();
3493
if (unlikely(type == BCOND_CTR)) {
3494
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3497
tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3498
#if defined(TARGET_PPC64)
3500
tcg_gen_ext32u_tl(temp, cpu_ctr);
3503
tcg_gen_mov_tl(temp, cpu_ctr);
3505
tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3507
tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3509
tcg_temp_free(temp);
3511
if ((bo & 0x10) == 0) {
3513
uint32_t bi = BI(ctx->opcode);
3514
uint32_t mask = 1 << (3 - (bi & 0x03));
3515
TCGv_i32 temp = tcg_temp_new_i32();
3518
tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3519
tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3521
tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3522
tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3524
tcg_temp_free_i32(temp);
3526
gen_update_cfar(ctx, ctx->nip);
3527
if (type == BCOND_IM) {
3528
target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3529
if (likely(AA(ctx->opcode) == 0)) {
3530
gen_goto_tb(ctx, 0, ctx->nip + li - 4);
3532
gen_goto_tb(ctx, 0, li);
3535
gen_goto_tb(ctx, 1, ctx->nip);
3537
#if defined(TARGET_PPC64)
3538
if (!(ctx->sf_mode))
3539
tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3542
tcg_gen_andi_tl(cpu_nip, target, ~3);
3545
#if defined(TARGET_PPC64)
3546
if (!(ctx->sf_mode))
3547
tcg_gen_movi_tl(cpu_nip, (uint32_t)ctx->nip);
3550
tcg_gen_movi_tl(cpu_nip, ctx->nip);
3555
/* bc: conditional branch with immediate displacement. */
static void gen_bc(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_IM);
}
/* bcctr: conditional branch to the address held in CTR. */
static void gen_bcctr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_CTR);
}
/* bclr: conditional branch to the address held in LR. */
static void gen_bclr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_LR);
}
/*** Condition register logical ***/
3571
#define GEN_CRLOGIC(name, tcg_op, opc) \
3572
static void glue(gen_, name)(DisasContext *ctx) \
3577
sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3578
t0 = tcg_temp_new_i32(); \
3580
tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
3582
tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
3584
tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
3585
t1 = tcg_temp_new_i32(); \
3586
sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3588
tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
3590
tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
3592
tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
3593
tcg_op(t0, t0, t1); \
3594
bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3595
tcg_gen_andi_i32(t0, t0, bitmask); \
3596
tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3597
tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
3598
tcg_temp_free_i32(t0); \
3599
tcg_temp_free_i32(t1); \
3603
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3605
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3607
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3609
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3611
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3613
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3615
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3617
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
3620
/* mcrf: copy condition register field crfS into crfD. */
static void gen_mcrf(DisasContext *ctx)
{
    tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
}
/*** System linkage ***/
3627
/* rfi (mem_idx only) */
3628
static void gen_rfi(DisasContext *ctx)
3630
#if defined(CONFIG_USER_ONLY)
3631
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3633
/* Restore CPU state */
3634
if (unlikely(!ctx->mem_idx)) {
3635
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3638
gen_update_cfar(ctx, ctx->nip);
3640
gen_sync_exception(ctx);
3644
#if defined(TARGET_PPC64)
3645
static void gen_rfid(DisasContext *ctx)
3647
#if defined(CONFIG_USER_ONLY)
3648
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3650
/* Restore CPU state */
3651
if (unlikely(!ctx->mem_idx)) {
3652
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3655
gen_update_cfar(ctx, ctx->nip);
3657
gen_sync_exception(ctx);
3661
static void gen_hrfid(DisasContext *ctx)
3663
#if defined(CONFIG_USER_ONLY)
3664
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3666
/* Restore CPU state */
3667
if (unlikely(ctx->mem_idx <= 1)) {
3668
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3672
gen_sync_exception(ctx);
3678
#if defined(CONFIG_USER_ONLY)
3679
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3681
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3683
static void gen_sc(DisasContext *ctx)
3687
lev = (ctx->opcode >> 5) & 0x7F;
3688
gen_exception_err(ctx, POWERPC_SYSCALL, lev);
3694
/* tw: trap word, comparing rA against rB under the TO condition mask. */
static void gen_tw(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_helper_tw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
}
/* twi: trap word immediate, comparing rA against the sign-extended SIMM
 * field under the TO condition mask. */
static void gen_twi(DisasContext *ctx)
{
    TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
    TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_helper_tw(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
#if defined(TARGET_PPC64)
3717
static void gen_td(DisasContext *ctx)
3719
TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3720
/* Update the nip since this might generate a trap exception */
3721
gen_update_nip(ctx, ctx->nip);
3722
gen_helper_td(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
3723
tcg_temp_free_i32(t0);
3727
static void gen_tdi(DisasContext *ctx)
3729
TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3730
TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3731
/* Update the nip since this might generate a trap exception */
3732
gen_update_nip(ctx, ctx->nip);
3733
gen_helper_td(cpu_gpr[rA(ctx->opcode)], t0, t1);
3735
tcg_temp_free_i32(t1);
3739
/*** Processor control ***/
3742
static void gen_mcrxr(DisasContext *ctx)
3744
tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], cpu_xer);
3745
tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], XER_CA);
3746
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_SO | 1 << XER_OV | 1 << XER_CA));
3750
static void gen_mfcr(DisasContext *ctx)
3754
if (likely(ctx->opcode & 0x00100000)) {
3755
crm = CRM(ctx->opcode);
3756
if (likely(crm && ((crm & (crm - 1)) == 0))) {
3758
tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
3759
tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
3760
cpu_gpr[rD(ctx->opcode)], crn * 4);
3763
TCGv_i32 t0 = tcg_temp_new_i32();
3764
tcg_gen_mov_i32(t0, cpu_crf[0]);
3765
tcg_gen_shli_i32(t0, t0, 4);
3766
tcg_gen_or_i32(t0, t0, cpu_crf[1]);
3767
tcg_gen_shli_i32(t0, t0, 4);
3768
tcg_gen_or_i32(t0, t0, cpu_crf[2]);
3769
tcg_gen_shli_i32(t0, t0, 4);
3770
tcg_gen_or_i32(t0, t0, cpu_crf[3]);
3771
tcg_gen_shli_i32(t0, t0, 4);
3772
tcg_gen_or_i32(t0, t0, cpu_crf[4]);
3773
tcg_gen_shli_i32(t0, t0, 4);
3774
tcg_gen_or_i32(t0, t0, cpu_crf[5]);
3775
tcg_gen_shli_i32(t0, t0, 4);
3776
tcg_gen_or_i32(t0, t0, cpu_crf[6]);
3777
tcg_gen_shli_i32(t0, t0, 4);
3778
tcg_gen_or_i32(t0, t0, cpu_crf[7]);
3779
tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
3780
tcg_temp_free_i32(t0);
3785
static void gen_mfmsr(DisasContext *ctx)
3787
#if defined(CONFIG_USER_ONLY)
3788
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3790
if (unlikely(!ctx->mem_idx)) {
3791
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3794
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
3798
static void spr_noaccess(void *opaque, int gprn, int sprn)
3801
sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
3802
printf("ERROR: try to access SPR %d !\n", sprn);
3805
#define SPR_NOACCESS (&spr_noaccess)
3808
static inline void gen_op_mfspr(DisasContext *ctx)
3810
void (*read_cb)(void *opaque, int gprn, int sprn);
3811
uint32_t sprn = SPR(ctx->opcode);
3813
#if !defined(CONFIG_USER_ONLY)
3814
if (ctx->mem_idx == 2)
3815
read_cb = ctx->spr_cb[sprn].hea_read;
3816
else if (ctx->mem_idx)
3817
read_cb = ctx->spr_cb[sprn].oea_read;
3820
read_cb = ctx->spr_cb[sprn].uea_read;
3821
if (likely(read_cb != NULL)) {
3822
if (likely(read_cb != SPR_NOACCESS)) {
3823
(*read_cb)(ctx, rD(ctx->opcode), sprn);
3825
/* Privilege exception */
3826
/* This is a hack to avoid warnings when running Linux:
3827
* this OS breaks the PowerPC virtualisation model,
3828
* allowing userland application to read the PVR
3830
if (sprn != SPR_PVR) {
3831
qemu_log("Trying to read privileged spr %d %03x at "
3832
TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3833
printf("Trying to read privileged spr %d %03x at "
3834
TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3836
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3840
qemu_log("Trying to read invalid spr %d %03x at "
3841
TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3842
printf("Trying to read invalid spr %d %03x at " TARGET_FMT_lx "\n",
3843
sprn, sprn, ctx->nip);
3844
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
3848
static void gen_mfspr(DisasContext *ctx)
3854
static void gen_mftb(DisasContext *ctx)
3860
/* mtcrf / mtocrf: move fields of rS into CR under the CRM field mask.
 * Bit 20 set with a single-bit CRM selects the mtocrf (one-field) form.
 */
static void gen_mtcrf(DisasContext *ctx)
{
    uint32_t crm, crn;

    crm = CRM(ctx->opcode);
    if (likely((ctx->opcode & 0x00100000))) {
        if (crm && ((crm & (crm - 1)) == 0)) {
            TCGv_i32 temp = tcg_temp_new_i32();

            /* index of the single CR field selected by CRM */
            crn = ctz32(crm);
            tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
            tcg_gen_shri_i32(temp, temp, crn * 4);
            tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
            tcg_temp_free_i32(temp);
        }
    } else {
        TCGv_i32 temp = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
        for (crn = 0 ; crn < 8 ; crn++) {
            if (crm & (1 << crn)) {
                tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
                tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
            }
        }
        tcg_temp_free_i32(temp);
    }
}
3888
#if defined(TARGET_PPC64)
/* mtmsrd - store rS into the MSR (supervisor only, 64-bit).
 * The L=1 form only updates RI/EE and needs no synchronisation;
 * the full form goes through the store_msr helper and ends the TB.
 */
static void gen_mtmsrd(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        /* XXX: we need to update nip before the store
         *      if we enter power saving mode, we will exit the loop
         *      directly from ppc_store_msr
         */
        gen_update_nip(ctx, ctx->nip);
        gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
    }
#endif
}
#endif
3920
/* mtmsr - store rS into the (32-bit) MSR (supervisor only).
 * On a 64-bit target only the low 32 bits of the MSR are replaced.
 */
static void gen_mtmsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        TCGv msr = tcg_temp_new();

        /* XXX: we need to update nip before the store
         *      if we enter power saving mode, we will exit the loop
         *      directly from ppc_store_msr
         */
        gen_update_nip(ctx, ctx->nip);
#if defined(TARGET_PPC64)
        tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
#else
        tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
#endif
        gen_helper_store_msr(msr);
        tcg_temp_free(msr);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
    }
#endif
}
3958
/* mtspr: dispatch to the per-SPR write callback for the current
 * privilege level, raising a privilege or invalid-SPR exception
 * when the write is not permitted.
 */
static void gen_mtspr(DisasContext *ctx)
{
    void (*write_cb)(void *opaque, int sprn, int gprn);
    uint32_t sprn = SPR(ctx->opcode);

#if !defined(CONFIG_USER_ONLY)
    if (ctx->mem_idx == 2)
        write_cb = ctx->spr_cb[sprn].hea_write;
    else if (ctx->mem_idx)
        write_cb = ctx->spr_cb[sprn].oea_write;
    else
#endif
        write_cb = ctx->spr_cb[sprn].uea_write;
    if (likely(write_cb != NULL)) {
        if (likely(write_cb != SPR_NOACCESS)) {
            (*write_cb)(ctx, sprn, rS(ctx->opcode));
        } else {
            /* Privilege exception */
            qemu_log("Trying to write privileged spr %d %03x at "
                     TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
            printf("Trying to write privileged spr %d %03x at " TARGET_FMT_lx
                   "\n", sprn, sprn, ctx->nip);
            gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* SPR not implemented for this CPU */
        qemu_log("Trying to write invalid spr %d %03x at "
                 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
        printf("Trying to write invalid spr %d %03x at " TARGET_FMT_lx "\n",
               sprn, sprn, ctx->nip);
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
    }
}
3992
/*** Cache management ***/
3995
/* dcbf - data cache block flush: emulated as a byte load for MMU purposes */
static void gen_dcbf(DisasContext *ctx)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld8u(ctx, t0, t0);
    tcg_temp_free(t0);
}
4006
/* dcbi (Supervisor only) */
4007
static void gen_dcbi(DisasContext *ctx)
4009
#if defined(CONFIG_USER_ONLY)
4010
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4013
if (unlikely(!ctx->mem_idx)) {
4014
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4017
EA = tcg_temp_new();
4018
gen_set_access_type(ctx, ACCESS_CACHE);
4019
gen_addr_reg_index(ctx, EA);
4020
val = tcg_temp_new();
4021
/* XXX: specification says this should be treated as a store by the MMU */
4022
gen_qemu_ld8u(ctx, val, EA);
4023
gen_qemu_st8(ctx, val, EA);
4030
/* dcbst - data cache block store: emulated as a byte load for MMU purposes */
static void gen_dcbst(DisasContext *ctx)
{
    /* XXX: specification say this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld8u(ctx, t0, t0);
    tcg_temp_free(t0);
}
4042
/* dcbt - data cache block touch (a hint, so nothing to emulate) */
static void gen_dcbt(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     *      but does not generate any exception
     */
}
4051
/* dcbtst - data cache block touch for store (a hint, nothing to emulate) */
static void gen_dcbtst(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     *      but does not generate any exception
     */
}
4060
/* dcbz - zero a full cache block via helper */
static void gen_dcbz(DisasContext *ctx)
{
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_dcbz(t0);
    tcg_temp_free(t0);
}
4072
/* dcbz on PowerPC 970: bit 10 of the opcode selects the 32-byte
 * (dcbz_970 helper) variant instead of the full-block one.
 */
static void gen_dcbz_970(DisasContext *ctx)
{
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    if (ctx->opcode & 0x00200000)
        gen_helper_dcbz(t0);
    else
        gen_helper_dcbz_970(t0);
    tcg_temp_free(t0);
}
4088
/* dst - data stream touch (AltiVec prefetch hint); rA=0 is invalid */
static void gen_dst(DisasContext *ctx)
{
    if (rA(ctx->opcode) == 0) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
    } else {
        /* interpreted as no-op */
    }
}
4098
/* dstst - data stream touch for store (hint); rA=0 is invalid */
static void gen_dstst(DisasContext *ctx)
{
    if (rA(ctx->opcode) == 0) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
    } else {
        /* interpreted as no-op */
    }
}
4109
/* dss - data stream stop (hint) */
static void gen_dss(DisasContext *ctx)
{
    /* interpreted as no-op */
}
4115
/* icbi - instruction cache block invalidate (goes through a helper so
 * translated code for that block can be flushed)
 */
static void gen_icbi(DisasContext *ctx)
{
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_icbi(t0);
    tcg_temp_free(t0);
}
4129
/* dcba - data cache block allocate (optional, performance hint) */
static void gen_dcba(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a store by the MMU
     *      but does not generate any exception
     */
}
4137
/*** Segment register manipulation ***/
4138
/* Supervisor only: */
4141
/* mfsr - read segment register SR (supervisor only) */
static void gen_mfsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#endif
}
4158
/* mfsrin - read the segment register selected by rB[28..31] (supervisor) */
static void gen_mfsrin(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#endif
}
4177
/* mtsr - write segment register SR (supervisor only) */
static void gen_mtsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif
}
4194
/* mtsrin - write the segment register selected by rB[28..31] (supervisor).
 * NOTE: the source register is taken with rD(); the rD and rS fields
 * occupy the same opcode bits, so the value is the same as rS().
 */
static void gen_mtsrin(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_store_sr(t0, cpu_gpr[rD(ctx->opcode)]);
    tcg_temp_free(t0);
#endif
}
4212
#if defined(TARGET_PPC64)
4213
/* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
4216
/* mfsr - 64-bit "bridge" variant backed by the SLB (supervisor only) */
static void gen_mfsr_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#endif
}
4233
/* mfsrin - 64-bit "bridge" variant backed by the SLB (supervisor only) */
static void gen_mfsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#endif
}
4252
/* mtsr - 64-bit "bridge" variant backed by the SLB (supervisor only) */
static void gen_mtsr_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif
}
4269
/* mtsrin - 64-bit "bridge" variant backed by the SLB (supervisor only) */
static void gen_mtsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif
}
4288
/* slbmte - write an SLB entry: rB selects the entry, rS is the data */
static void gen_slbmte(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    gen_helper_store_slb(cpu_gpr[rB(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#endif
}
4301
/* slbmfee - read the ESID half of the SLB entry selected by rB */
static void gen_slbmfee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)],
                             cpu_gpr[rB(ctx->opcode)]);
#endif
}
4315
/* slbmfev - read the VSID half of the SLB entry selected by rB */
static void gen_slbmfev(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)],
                             cpu_gpr[rB(ctx->opcode)]);
#endif
}
4328
#endif /* defined(TARGET_PPC64) */
4330
/*** Lookaside buffer management ***/
4331
/* Optional & mem_idx only: */
4334
/* tlbia - invalidate all TLB entries (supervisor only) */
static void gen_tlbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_tlbia();
#endif
}
4348
/* tlbiel - local TLB invalidate entry; emulated like tlbie since this
 * implementation has no other "processors" to broadcast to
 */
static void gen_tlbiel(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
#endif
}
4362
/* tlbie - invalidate the TLB entry for the EA in rB (supervisor only).
 * In 32-bit mode on a 64-bit CPU the EA is truncated to 32 bits first.
 */
static void gen_tlbie(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
#if defined(TARGET_PPC64)
    if (!ctx->sf_mode) {
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
        gen_helper_tlbie(t0);
        tcg_temp_free(t0);
    } else
#endif
        gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
#endif
}
4384
/* tlbsync - synchronize after tlbie (supervisor only).  Nothing to wait
 * for here, but the translation block is ended to flush cached state.
 */
static void gen_tlbsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* This has no effect: it should ensure that all previous
     * tlbie have completed
     */
    gen_stop_exception(ctx);
#endif
}
4400
#if defined(TARGET_PPC64)
4402
/* slbia - invalidate all SLB entries (supervisor only) */
static void gen_slbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_slbia();
#endif
}
4416
static void gen_slbie(DisasContext *ctx)
4418
#if defined(CONFIG_USER_ONLY)
4419
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4421
if (unlikely(!ctx->mem_idx)) {
4422
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4425
gen_helper_slbie(cpu_gpr[rB(ctx->opcode)]);
4430
/*** External control ***/
4434
/* eciwx - external control in word indexed (aligned 32-bit load) */
static void gen_eciwx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] ! */
    gen_set_access_type(ctx, ACCESS_EXT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_check_align(ctx, t0, 0x03);
    gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
}
4447
/* ecowx - external control out word indexed (aligned 32-bit store) */
static void gen_ecowx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] ! */
    gen_set_access_type(ctx, ACCESS_EXT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_check_align(ctx, t0, 0x03);
    gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
}
4459
/* PowerPC 601 specific instructions */
4462
/* abs - abs. (601): rD = |rA| */
static void gen_abs(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
4477
/* abso - abso. (601): rD = |rA|, setting XER[OV,SO] on 0x80000000 */
static void gen_abso(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    int l3 = gen_new_label();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l3);
    gen_set_label(l2);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l3);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
4499
/* clcs (601): cache line compute size */
static void gen_clcs(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
    gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
    /* Rc=1 sets CR0 to an undefined state */
}
4508
/* div - div. (601): divide using MQ, via helper */
static void gen_div(DisasContext *ctx)
{
    gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
4516
/* divo - divo. (601): div with overflow reporting, via helper */
static void gen_divo(DisasContext *ctx)
{
    gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
4524
/* divs - divs. (601): divide short, via helper */
static void gen_divs(DisasContext *ctx)
{
    gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
4531
/* divso - divso. */
4532
static void gen_divso(DisasContext *ctx)
4534
gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4535
if (unlikely(Rc(ctx->opcode) != 0))
4536
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4540
/* doz - doz. (601): difference or zero */
static void gen_doz(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
    tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
4555
/* dozo - dozo. (601): difference or zero with signed-overflow detection */
static void gen_dozo(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
    tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
4584
/* dozi (601): difference or zero immediate */
static void gen_dozi(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
    tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
4599
/* lscbx - lscbx. */
4600
static void gen_lscbx(DisasContext *ctx)
4602
TCGv t0 = tcg_temp_new();
4603
TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
4604
TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
4605
TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
4607
gen_addr_reg_index(ctx, t0);
4608
/* NIP cannot be restored if the memory exception comes from an helper */
4609
gen_update_nip(ctx, ctx->nip - 4);
4610
gen_helper_lscbx(t0, t0, t1, t2, t3);
4611
tcg_temp_free_i32(t1);
4612
tcg_temp_free_i32(t2);
4613
tcg_temp_free_i32(t3);
4614
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
4615
tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
4616
if (unlikely(Rc(ctx->opcode) != 0))
4617
gen_set_Rc0(ctx, t0);
4621
/* maskg - maskg. */
4622
static void gen_maskg(DisasContext *ctx)
4624
int l1 = gen_new_label();
4625
TCGv t0 = tcg_temp_new();
4626
TCGv t1 = tcg_temp_new();
4627
TCGv t2 = tcg_temp_new();
4628
TCGv t3 = tcg_temp_new();
4629
tcg_gen_movi_tl(t3, 0xFFFFFFFF);
4630
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4631
tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
4632
tcg_gen_addi_tl(t2, t0, 1);
4633
tcg_gen_shr_tl(t2, t3, t2);
4634
tcg_gen_shr_tl(t3, t3, t1);
4635
tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
4636
tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
4637
tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4643
if (unlikely(Rc(ctx->opcode) != 0))
4644
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4647
/* maskir - maskir. */
4648
static void gen_maskir(DisasContext *ctx)
4650
TCGv t0 = tcg_temp_new();
4651
TCGv t1 = tcg_temp_new();
4652
tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4653
tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4654
tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4657
if (unlikely(Rc(ctx->opcode) != 0))
4658
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4662
/* mul - mul. (601): 32x32->64 multiply; low half to MQ, high half to rD */
static void gen_mul(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv t2 = tcg_temp_new();
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_trunc_i64_tl(t2, t0);
    gen_store_spr(SPR_MQ, t2);
    tcg_gen_shri_i64(t1, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
4682
/* mulo - mulo. (601): mul with XER[OV,SO] set when the signed product
 * does not fit in 32 bits.
 */
static void gen_mulo(DisasContext *ctx)
{
    int l1 = gen_new_label();
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv t2 = tcg_temp_new();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_trunc_i64_tl(t2, t0);
    gen_store_spr(SPR_MQ, t2);
    tcg_gen_shri_i64(t1, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_gen_ext32s_i64(t1, t0);
    tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    gen_set_label(l1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
4709
/* nabs - nabs. (601): rD = -|rA| */
static void gen_nabs(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
4723
/* nabso - nabso. */
4724
static void gen_nabso(DisasContext *ctx)
4726
int l1 = gen_new_label();
4727
int l2 = gen_new_label();
4728
tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4729
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4732
tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4734
/* nabs never overflows */
4735
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4736
if (unlikely(Rc(ctx->opcode) != 0))
4737
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4741
/* rlmi - rlmi. (601): rotate left then merge into rA under MASK(mb,me) */
static void gen_rlmi(DisasContext *ctx)
{
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_andi_tl(t0, t0, MASK(mb, me));
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
4757
/* rrib - rrib. (601): rotate right and insert bit 0 of rS into rA */
static void gen_rrib(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0x80000000);
    tcg_gen_shr_tl(t1, t1, t0);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_and_tl(t0, t0, t1);
    tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
4775
/* sle - sle. (601): shift left extended; rotated value goes to MQ */
static void gen_sle(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
4793
/* sleq - sleq. (601): shift left extended with MQ */
static void gen_sleq(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t2, 0xFFFFFFFF);
    tcg_gen_shl_tl(t2, t2, t0);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_and_tl(t0, t0, t2);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
4815
/* sliq - sliq. (601): shift left immediate with MQ */
static void gen_sliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
4831
/* slliq - slliq. */
4832
static void gen_slliq(DisasContext *ctx)
4834
int sh = SH(ctx->opcode);
4835
TCGv t0 = tcg_temp_new();
4836
TCGv t1 = tcg_temp_new();
4837
tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4838
gen_load_spr(t1, SPR_MQ);
4839
gen_store_spr(SPR_MQ, t0);
4840
tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
4841
tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
4842
tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4845
if (unlikely(Rc(ctx->opcode) != 0))
4846
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4850
/* sllq - sllq. (601): shift left long with MQ; bit 26 of rB selects
 * whether the result comes from MQ or from the shifted rS.
 */
static void gen_sllq(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shl_tl(t1, t1, t2);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    gen_load_spr(t0, SPR_MQ);
    tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    gen_load_spr(t2, SPR_MQ);
    tcg_gen_andc_tl(t1, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
4879
/* slq - slq. (601): shift left with MQ; result is zero when bit 26 of
 * rB (shift amount >= 32) is set.
 */
static void gen_slq(DisasContext *ctx)
{
    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    gen_store_spr(SPR_MQ, t1);
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
4901
/* sraiq - sraiq. */
4902
static void gen_sraiq(DisasContext *ctx)
4904
int sh = SH(ctx->opcode);
4905
int l1 = gen_new_label();
4906
TCGv t0 = tcg_temp_new();
4907
TCGv t1 = tcg_temp_new();
4908
tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4909
tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4910
tcg_gen_or_tl(t0, t0, t1);
4911
gen_store_spr(SPR_MQ, t0);
4912
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
4913
tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4914
tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
4915
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4917
tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
4920
if (unlikely(Rc(ctx->opcode) != 0))
4921
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4925
/* sraq - sraq. (601): shift right algebraic with MQ and XER[CA].
 * FIX(review): the >=32 branch must test t0 (which holds rB & 0x20,
 * computed just above); the corrupted text compared t2, which at that
 * point holds a shifted copy of rS.  All sibling helpers (gen_slq,
 * gen_sllq, gen_srlq) test the 0x20 mask here.
 */
static void gen_sraq(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_subfi_tl(t2, 32, t2);
    tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
    tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
    gen_set_label(l2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
4958
/* sre - sre. (601): shift right extended; rotated value goes to MQ */
static void gen_sre(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
4976
/* srea - srea. (601): shift right extended algebraic with MQ */
static void gen_srea(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
4991
/* sreq - sreq. (601): shift right extended with MQ */
static void gen_sreq(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shr_tl(t1, t1, t0);
    tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    gen_load_spr(t2, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_and_tl(t0, t0, t1);
    tcg_gen_andc_tl(t2, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
5013
/* sriq - sriq. (601): shift right immediate with MQ */
static void gen_sriq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
5030
/* srliq - srliq. (601): shift right long immediate with MQ */
static void gen_srliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
    tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
5048
/* srlq - srlq. (601): shift right long with MQ; bit 26 of rB selects
 * whether the result comes from MQ or from the shifted rS.
 */
static void gen_srlq(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shr_tl(t2, t1, t2);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    gen_load_spr(t0, SPR_MQ);
    tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_and_tl(t0, t0, t2);
    gen_load_spr(t1, SPR_MQ);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
5078
/* srq - srq. (601): shift right with MQ; result is zero when bit 26 of
 * rB (shift amount >= 32) is set.
 * FIX(review): the zeroing branch must test t1 (rB & 0x20) as in the
 * parallel gen_slq; the corrupted text tested t0 (the shifted value),
 * which would zero rA for every non-zero result.
 */
static void gen_srq(DisasContext *ctx)
{
    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    gen_store_spr(SPR_MQ, t1);
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
5100
/* PowerPC 602 specific instructions */
5103
static void gen_dsa(DisasContext *ctx)
5106
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5110
static void gen_esa(DisasContext *ctx)
5113
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5117
static void gen_mfrom(DisasContext *ctx)
5119
#if defined(CONFIG_USER_ONLY)
5120
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5122
if (unlikely(!ctx->mem_idx)) {
5123
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5126
gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5130
/* 602 - 603 - G2 TLB management */
5133
static void gen_tlbld_6xx(DisasContext *ctx)
5135
#if defined(CONFIG_USER_ONLY)
5136
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5138
if (unlikely(!ctx->mem_idx)) {
5139
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5142
gen_helper_6xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
5147
static void gen_tlbli_6xx(DisasContext *ctx)
5149
#if defined(CONFIG_USER_ONLY)
5150
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5152
if (unlikely(!ctx->mem_idx)) {
5153
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5156
gen_helper_6xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5160
/* 74xx TLB management */
5163
static void gen_tlbld_74xx(DisasContext *ctx)
5165
#if defined(CONFIG_USER_ONLY)
5166
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5168
if (unlikely(!ctx->mem_idx)) {
5169
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5172
gen_helper_74xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
5177
static void gen_tlbli_74xx(DisasContext *ctx)
5179
#if defined(CONFIG_USER_ONLY)
5180
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5182
if (unlikely(!ctx->mem_idx)) {
5183
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5186
gen_helper_74xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5190
/* POWER instructions not in PowerPC 601 */
5193
static void gen_clf(DisasContext *ctx)
5195
/* Cache line flush: implemented as no-op */
5199
static void gen_cli(DisasContext *ctx)
5201
/* Cache line invalidate: privileged and treated as no-op */
5202
#if defined(CONFIG_USER_ONLY)
5203
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5205
if (unlikely(!ctx->mem_idx)) {
5206
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5213
static void gen_dclst(DisasContext *ctx)
5215
/* Data cache line store: treated as no-op */
5218
static void gen_mfsri(DisasContext *ctx)
5220
#if defined(CONFIG_USER_ONLY)
5221
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5223
int ra = rA(ctx->opcode);
5224
int rd = rD(ctx->opcode);
5226
if (unlikely(!ctx->mem_idx)) {
5227
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5230
t0 = tcg_temp_new();
5231
gen_addr_reg_index(ctx, t0);
5232
tcg_gen_shri_tl(t0, t0, 28);
5233
tcg_gen_andi_tl(t0, t0, 0xF);
5234
gen_helper_load_sr(cpu_gpr[rd], t0);
5236
if (ra != 0 && ra != rd)
5237
tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
5241
static void gen_rac(DisasContext *ctx)
5243
#if defined(CONFIG_USER_ONLY)
5244
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5247
if (unlikely(!ctx->mem_idx)) {
5248
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5251
t0 = tcg_temp_new();
5252
gen_addr_reg_index(ctx, t0);
5253
gen_helper_rac(cpu_gpr[rD(ctx->opcode)], t0);
5258
static void gen_rfsvc(DisasContext *ctx)
5260
#if defined(CONFIG_USER_ONLY)
5261
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5263
if (unlikely(!ctx->mem_idx)) {
5264
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5268
gen_sync_exception(ctx);
5272
/* svc is not implemented for now */
5274
/* POWER2 specific instructions */
5275
/* Quad manipulation (load/store two floats at a time) */
5278
static void gen_lfq(DisasContext *ctx)
5280
int rd = rD(ctx->opcode);
5282
gen_set_access_type(ctx, ACCESS_FLOAT);
5283
t0 = tcg_temp_new();
5284
gen_addr_imm_index(ctx, t0, 0);
5285
gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5286
gen_addr_add(ctx, t0, t0, 8);
5287
gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5292
static void gen_lfqu(DisasContext *ctx)
5294
int ra = rA(ctx->opcode);
5295
int rd = rD(ctx->opcode);
5297
gen_set_access_type(ctx, ACCESS_FLOAT);
5298
t0 = tcg_temp_new();
5299
t1 = tcg_temp_new();
5300
gen_addr_imm_index(ctx, t0, 0);
5301
gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5302
gen_addr_add(ctx, t1, t0, 8);
5303
gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5305
tcg_gen_mov_tl(cpu_gpr[ra], t0);
5311
static void gen_lfqux(DisasContext *ctx)
5313
int ra = rA(ctx->opcode);
5314
int rd = rD(ctx->opcode);
5315
gen_set_access_type(ctx, ACCESS_FLOAT);
5317
t0 = tcg_temp_new();
5318
gen_addr_reg_index(ctx, t0);
5319
gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5320
t1 = tcg_temp_new();
5321
gen_addr_add(ctx, t1, t0, 8);
5322
gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5325
tcg_gen_mov_tl(cpu_gpr[ra], t0);
5330
static void gen_lfqx(DisasContext *ctx)
5332
int rd = rD(ctx->opcode);
5334
gen_set_access_type(ctx, ACCESS_FLOAT);
5335
t0 = tcg_temp_new();
5336
gen_addr_reg_index(ctx, t0);
5337
gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5338
gen_addr_add(ctx, t0, t0, 8);
5339
gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5344
static void gen_stfq(DisasContext *ctx)
5346
int rd = rD(ctx->opcode);
5348
gen_set_access_type(ctx, ACCESS_FLOAT);
5349
t0 = tcg_temp_new();
5350
gen_addr_imm_index(ctx, t0, 0);
5351
gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5352
gen_addr_add(ctx, t0, t0, 8);
5353
gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5358
static void gen_stfqu(DisasContext *ctx)
5360
int ra = rA(ctx->opcode);
5361
int rd = rD(ctx->opcode);
5363
gen_set_access_type(ctx, ACCESS_FLOAT);
5364
t0 = tcg_temp_new();
5365
gen_addr_imm_index(ctx, t0, 0);
5366
gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5367
t1 = tcg_temp_new();
5368
gen_addr_add(ctx, t1, t0, 8);
5369
gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5372
tcg_gen_mov_tl(cpu_gpr[ra], t0);
5377
static void gen_stfqux(DisasContext *ctx)
5379
int ra = rA(ctx->opcode);
5380
int rd = rD(ctx->opcode);
5382
gen_set_access_type(ctx, ACCESS_FLOAT);
5383
t0 = tcg_temp_new();
5384
gen_addr_reg_index(ctx, t0);
5385
gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5386
t1 = tcg_temp_new();
5387
gen_addr_add(ctx, t1, t0, 8);
5388
gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5391
tcg_gen_mov_tl(cpu_gpr[ra], t0);
5396
static void gen_stfqx(DisasContext *ctx)
5398
int rd = rD(ctx->opcode);
5400
gen_set_access_type(ctx, ACCESS_FLOAT);
5401
t0 = tcg_temp_new();
5402
gen_addr_reg_index(ctx, t0);
5403
gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5404
gen_addr_add(ctx, t0, t0, 8);
5405
gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5409
/* BookE specific instructions */
5411
/* XXX: not implemented on 440 ? */
5412
static void gen_mfapidi(DisasContext *ctx)
5415
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5418
/* XXX: not implemented on 440 ? */
5419
static void gen_tlbiva(DisasContext *ctx)
5421
#if defined(CONFIG_USER_ONLY)
5422
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5425
if (unlikely(!ctx->mem_idx)) {
5426
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5429
t0 = tcg_temp_new();
5430
gen_addr_reg_index(ctx, t0);
5431
gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
5436
/* All 405 MAC instructions are translated here */
5437
static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5438
int ra, int rb, int rt, int Rc)
5442
t0 = tcg_temp_local_new();
5443
t1 = tcg_temp_local_new();
5445
switch (opc3 & 0x0D) {
5447
/* macchw - macchw. - macchwo - macchwo. */
5448
/* macchws - macchws. - macchwso - macchwso. */
5449
/* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
5450
/* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
5451
/* mulchw - mulchw. */
5452
tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5453
tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5454
tcg_gen_ext16s_tl(t1, t1);
5457
/* macchwu - macchwu. - macchwuo - macchwuo. */
5458
/* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
5459
/* mulchwu - mulchwu. */
5460
tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5461
tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5462
tcg_gen_ext16u_tl(t1, t1);
5465
/* machhw - machhw. - machhwo - machhwo. */
5466
/* machhws - machhws. - machhwso - machhwso. */
5467
/* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
5468
/* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
5469
/* mulhhw - mulhhw. */
5470
tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5471
tcg_gen_ext16s_tl(t0, t0);
5472
tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5473
tcg_gen_ext16s_tl(t1, t1);
5476
/* machhwu - machhwu. - machhwuo - machhwuo. */
5477
/* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
5478
/* mulhhwu - mulhhwu. */
5479
tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5480
tcg_gen_ext16u_tl(t0, t0);
5481
tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5482
tcg_gen_ext16u_tl(t1, t1);
5485
/* maclhw - maclhw. - maclhwo - maclhwo. */
5486
/* maclhws - maclhws. - maclhwso - maclhwso. */
5487
/* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
5488
/* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
5489
/* mullhw - mullhw. */
5490
tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5491
tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5494
/* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
5495
/* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
5496
/* mullhwu - mullhwu. */
5497
tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5498
tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5502
/* (n)multiply-and-accumulate (0x0C / 0x0E) */
5503
tcg_gen_mul_tl(t1, t0, t1);
5505
/* nmultiply-and-accumulate (0x0E) */
5506
tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5508
/* multiply-and-accumulate (0x0C) */
5509
tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5513
/* Check overflow and/or saturate */
5514
int l1 = gen_new_label();
5517
/* Start with XER OV disabled, the most likely case */
5518
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
5522
tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5523
tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5524
tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5525
tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5528
tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5529
tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5533
tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5536
tcg_gen_movi_tl(t0, UINT32_MAX);
5540
/* Check overflow */
5541
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
5544
tcg_gen_mov_tl(cpu_gpr[rt], t0);
5547
tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5551
if (unlikely(Rc) != 0) {
5553
gen_set_Rc0(ctx, cpu_gpr[rt]);
5557
/* One translator per 405 MAC instruction, all funnelled into
 * gen_405_mulladd_insn() with the opcode-specific variant bits. */
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
                         rD(ctx->opcode), Rc(ctx->opcode));                   \
}

/* macchw - macchw. */
GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
/* macchwo - macchwo. */
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
/* macchws - macchws. */
GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
/* macchwso - macchwso. */
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
/* macchwsu - macchwsu. */
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
/* macchwsuo - macchwsuo. */
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
/* macchwu - macchwu. */
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
/* macchwuo - macchwuo. */
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
/* machhw - machhw. */
GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
/* machhwo - machhwo. */
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
/* machhws - machhws. */
GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
/* machhwso - machhwso. */
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
/* machhwsu - machhwsu. */
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
/* machhwsuo - machhwsuo. */
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
/* machhwu - machhwu. */
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
/* machhwuo - machhwuo. */
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
/* maclhw - maclhw. */
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
/* maclhwo - maclhwo. */
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
/* maclhws - maclhws. */
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
/* maclhwso - maclhwso. */
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
/* maclhwu - maclhwu. */
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
/* maclhwuo - maclhwuo. */
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
/* maclhwsu - maclhwsu. */
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
/* maclhwsuo - maclhwsuo. */
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
/* nmacchw - nmacchw. */
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
/* nmacchwo - nmacchwo. */
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
/* nmacchws - nmacchws. */
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
/* nmacchwso - nmacchwso. */
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
/* nmachhw - nmachhw. */
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
/* nmachhwo - nmachhwo. */
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
/* nmachhws - nmachhws. */
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
/* nmachhwso - nmachhwso. */
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
/* nmaclhw - nmaclhw. */
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
/* nmaclhwo - nmaclhwo. */
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
/* nmaclhws - nmaclhws. */
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
/* nmaclhwso - nmaclhwso. */
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
/* mulchw - mulchw. */
GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
/* mulchwu - mulchwu. */
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
/* mulhhw - mulhhw. */
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
/* mulhhwu - mulhhwu. */
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
/* mullhw - mullhw. */
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
/* mullhwu - mullhwu. */
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
/* mfdcr — privileged move from device control register. */
static void gen_mfdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv dcrn;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], dcrn);
    tcg_temp_free(dcrn);
#endif
}

/* mtdcr — privileged move to device control register. */
static void gen_mtdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv dcrn;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_store_dcr(dcrn, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(dcrn);
#endif
}

/* mfdcrx — indexed DCR read (DCR number in rA). */
/* XXX: not implemented on 440 ? */
static void gen_mfdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif
}

/* mtdcrx — indexed DCR write (DCR number in rA). */
/* XXX: not implemented on 440 ? */
static void gen_mtdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif
}

/* mfdcrux (PPC 460) : user-mode access to DCR */
static void gen_mfdcrux(DisasContext *ctx)
{
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* mtdcrux (PPC 460) : user-mode access to DCR */
static void gen_mtdcrux(DisasContext *ctx)
{
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}
/* dccci — privileged data cache congruence-class invalidate: no-op. */
static void gen_dccci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* interpreted as no-op */
#endif
}

/* dcread — privileged data cache read: loads the word at EA and returns
 * the EA itself in rD (the cache is not modelled). */
static void gen_dcread(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    TCGv EA, val;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_set_access_type(ctx, ACCESS_CACHE);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    gen_qemu_ld32u(ctx, val, EA);
    tcg_temp_free(val);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
#endif
}

/* icbt (40x) */
static void gen_icbt_40x(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     *      but does not generate any exception
     */
}

/* iccci — privileged instruction cache invalidate: no-op. */
static void gen_iccci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* interpreted as no-op */
#endif
}

/* icread — privileged instruction cache read: no-op. */
static void gen_icread(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* interpreted as no-op */
#endif
}
/* rfci (mem_idx only) */
5816
static void gen_rfci_40x(DisasContext *ctx)
5818
#if defined(CONFIG_USER_ONLY)
5819
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5821
if (unlikely(!ctx->mem_idx)) {
5822
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5825
/* Restore CPU state */
5826
gen_helper_40x_rfci();
5827
gen_sync_exception(ctx);
5831
static void gen_rfci(DisasContext *ctx)
5833
#if defined(CONFIG_USER_ONLY)
5834
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5836
if (unlikely(!ctx->mem_idx)) {
5837
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5840
/* Restore CPU state */
5842
gen_sync_exception(ctx);
5846
/* BookE specific */
5848
/* XXX: not implemented on 440 ? */
5849
static void gen_rfdi(DisasContext *ctx)
5851
#if defined(CONFIG_USER_ONLY)
5852
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5854
if (unlikely(!ctx->mem_idx)) {
5855
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5858
/* Restore CPU state */
5860
gen_sync_exception(ctx);
5864
/* XXX: not implemented on 440 ? */
5865
static void gen_rfmci(DisasContext *ctx)
5867
#if defined(CONFIG_USER_ONLY)
5868
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5870
if (unlikely(!ctx->mem_idx)) {
5871
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5874
/* Restore CPU state */
5876
gen_sync_exception(ctx);
5880
/* TLB management - PowerPC 405 implementation */
5883
static void gen_tlbre_40x(DisasContext *ctx)
5885
#if defined(CONFIG_USER_ONLY)
5886
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5888
if (unlikely(!ctx->mem_idx)) {
5889
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5892
switch (rB(ctx->opcode)) {
5894
gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5897
gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5900
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5906
/* tlbsx - tlbsx. */
5907
static void gen_tlbsx_40x(DisasContext *ctx)
5909
#if defined(CONFIG_USER_ONLY)
5910
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5913
if (unlikely(!ctx->mem_idx)) {
5914
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5917
t0 = tcg_temp_new();
5918
gen_addr_reg_index(ctx, t0);
5919
gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], t0);
5921
if (Rc(ctx->opcode)) {
5922
int l1 = gen_new_label();
5923
tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
5924
tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
5925
tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
5926
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5927
tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5934
static void gen_tlbwe_40x(DisasContext *ctx)
5936
#if defined(CONFIG_USER_ONLY)
5937
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5939
if (unlikely(!ctx->mem_idx)) {
5940
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5943
switch (rB(ctx->opcode)) {
5945
gen_helper_4xx_tlbwe_hi(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5948
gen_helper_4xx_tlbwe_lo(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5951
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5957
/* TLB management - PowerPC 440 implementation */
5960
static void gen_tlbre_440(DisasContext *ctx)
5962
#if defined(CONFIG_USER_ONLY)
5963
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5965
if (unlikely(!ctx->mem_idx)) {
5966
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5969
switch (rB(ctx->opcode)) {
5974
TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5975
gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], t0, cpu_gpr[rA(ctx->opcode)]);
5976
tcg_temp_free_i32(t0);
5980
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5986
/* tlbsx - tlbsx. */
5987
static void gen_tlbsx_440(DisasContext *ctx)
5989
#if defined(CONFIG_USER_ONLY)
5990
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5993
if (unlikely(!ctx->mem_idx)) {
5994
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5997
t0 = tcg_temp_new();
5998
gen_addr_reg_index(ctx, t0);
5999
gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], t0);
6001
if (Rc(ctx->opcode)) {
6002
int l1 = gen_new_label();
6003
tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
6004
tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
6005
tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
6006
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6007
tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6014
static void gen_tlbwe_440(DisasContext *ctx)
6016
#if defined(CONFIG_USER_ONLY)
6017
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6019
if (unlikely(!ctx->mem_idx)) {
6020
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6023
switch (rB(ctx->opcode)) {
6028
TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6029
gen_helper_440_tlbwe(t0, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6030
tcg_temp_free_i32(t0);
6034
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6040
/* TLB management - PowerPC BookE 2.06 implementation */
6043
static void gen_tlbre_booke206(DisasContext *ctx)
6045
#if defined(CONFIG_USER_ONLY)
6046
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6048
if (unlikely(!ctx->mem_idx)) {
6049
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6053
gen_helper_booke206_tlbre();
6057
/* tlbsx - tlbsx. */
6058
static void gen_tlbsx_booke206(DisasContext *ctx)
6060
#if defined(CONFIG_USER_ONLY)
6061
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6064
if (unlikely(!ctx->mem_idx)) {
6065
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6069
if (rA(ctx->opcode)) {
6070
t0 = tcg_temp_new();
6071
tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
6073
t0 = tcg_const_tl(0);
6076
tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
6077
gen_helper_booke206_tlbsx(t0);
6082
static void gen_tlbwe_booke206(DisasContext *ctx)
6084
#if defined(CONFIG_USER_ONLY)
6085
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6087
if (unlikely(!ctx->mem_idx)) {
6088
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6091
gen_helper_booke206_tlbwe();
6095
static void gen_tlbivax_booke206(DisasContext *ctx)
6097
#if defined(CONFIG_USER_ONLY)
6098
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6101
if (unlikely(!ctx->mem_idx)) {
6102
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6106
t0 = tcg_temp_new();
6107
gen_addr_reg_index(ctx, t0);
6109
gen_helper_booke206_tlbivax(t0);
6115
/* wrtee — copy bit MSR_EE from rD into MSR. */
static void gen_wrtee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
    tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
    tcg_temp_free(t0);
    /* Stop translation to have a chance to raise an exception
     * if we just set msr_ee to 1
     */
    gen_stop_exception(ctx);
#endif
}

/* wrteei — set or clear MSR_EE from an opcode immediate bit. */
static void gen_wrteei(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    if (ctx->opcode & 0x00008000) {
        tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
        /* Stop translation to have a chance to raise an exception */
        gen_stop_exception(ctx);
    } else {
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    }
#endif
}
/* PowerPC 440 specific instructions */
6160
static void gen_dlmzb(DisasContext *ctx)
6162
TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6163
gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
6164
cpu_gpr[rB(ctx->opcode)], t0);
6165
tcg_temp_free_i32(t0);
6168
/* mbar replaces eieio on 440 */
6169
static void gen_mbar(DisasContext *ctx)
6171
/* interpreted as no-op */
6174
/* msync replaces sync on 440 */
6175
static void gen_msync(DisasContext *ctx)
6177
/* interpreted as no-op */
6181
static void gen_icbt_440(DisasContext *ctx)
6183
/* interpreted as no-op */
6184
/* XXX: specification say this is treated as a load by the MMU
6185
* but does not generate any exception
6189
/*** Altivec vector extension ***/
6190
/* Altivec registers moves */
6192
static inline TCGv_ptr gen_avr_ptr(int reg)
6194
TCGv_ptr r = tcg_temp_new_ptr();
6195
tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, avr[reg]));
6199
#define GEN_VR_LDX(name, opc2, opc3) \
6200
static void glue(gen_, name)(DisasContext *ctx) \
6203
if (unlikely(!ctx->altivec_enabled)) { \
6204
gen_exception(ctx, POWERPC_EXCP_VPU); \
6207
gen_set_access_type(ctx, ACCESS_INT); \
6208
EA = tcg_temp_new(); \
6209
gen_addr_reg_index(ctx, EA); \
6210
tcg_gen_andi_tl(EA, EA, ~0xf); \
6211
if (ctx->le_mode) { \
6212
gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6213
tcg_gen_addi_tl(EA, EA, 8); \
6214
gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6216
gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6217
tcg_gen_addi_tl(EA, EA, 8); \
6218
gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6220
tcg_temp_free(EA); \
6223
#define GEN_VR_STX(name, opc2, opc3) \
6224
static void gen_st##name(DisasContext *ctx) \
6227
if (unlikely(!ctx->altivec_enabled)) { \
6228
gen_exception(ctx, POWERPC_EXCP_VPU); \
6231
gen_set_access_type(ctx, ACCESS_INT); \
6232
EA = tcg_temp_new(); \
6233
gen_addr_reg_index(ctx, EA); \
6234
tcg_gen_andi_tl(EA, EA, ~0xf); \
6235
if (ctx->le_mode) { \
6236
gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6237
tcg_gen_addi_tl(EA, EA, 8); \
6238
gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6240
gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6241
tcg_gen_addi_tl(EA, EA, 8); \
6242
gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6244
tcg_temp_free(EA); \
6247
#define GEN_VR_LVE(name, opc2, opc3) \
6248
static void gen_lve##name(DisasContext *ctx) \
6252
if (unlikely(!ctx->altivec_enabled)) { \
6253
gen_exception(ctx, POWERPC_EXCP_VPU); \
6256
gen_set_access_type(ctx, ACCESS_INT); \
6257
EA = tcg_temp_new(); \
6258
gen_addr_reg_index(ctx, EA); \
6259
rs = gen_avr_ptr(rS(ctx->opcode)); \
6260
gen_helper_lve##name (rs, EA); \
6261
tcg_temp_free(EA); \
6262
tcg_temp_free_ptr(rs); \
6265
#define GEN_VR_STVE(name, opc2, opc3) \
6266
static void gen_stve##name(DisasContext *ctx) \
6270
if (unlikely(!ctx->altivec_enabled)) { \
6271
gen_exception(ctx, POWERPC_EXCP_VPU); \
6274
gen_set_access_type(ctx, ACCESS_INT); \
6275
EA = tcg_temp_new(); \
6276
gen_addr_reg_index(ctx, EA); \
6277
rs = gen_avr_ptr(rS(ctx->opcode)); \
6278
gen_helper_stve##name (rs, EA); \
6279
tcg_temp_free(EA); \
6280
tcg_temp_free_ptr(rs); \
6283
GEN_VR_LDX(lvx, 0x07, 0x03);
6284
/* As we don't emulate the cache, lvxl is stricly equivalent to lvx */
6285
GEN_VR_LDX(lvxl, 0x07, 0x0B);
6287
GEN_VR_LVE(bx, 0x07, 0x00);
6288
GEN_VR_LVE(hx, 0x07, 0x01);
6289
GEN_VR_LVE(wx, 0x07, 0x02);
6291
GEN_VR_STX(svx, 0x07, 0x07);
6292
/* As we don't emulate the cache, stvxl is stricly equivalent to stvx */
6293
GEN_VR_STX(svxl, 0x07, 0x0F);
6295
GEN_VR_STVE(bx, 0x07, 0x04);
6296
GEN_VR_STVE(hx, 0x07, 0x05);
6297
GEN_VR_STVE(wx, 0x07, 0x06);
6299
/* lvsl — load vector for shift left; permute control from the helper. */
static void gen_lvsl(DisasContext *ctx)
{
    TCGv_ptr rd;
    TCGv EA;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_lvsl(rd, EA);
    tcg_temp_free(EA);
    tcg_temp_free_ptr(rd);
}

/* lvsr — load vector for shift right. */
static void gen_lvsr(DisasContext *ctx)
{
    TCGv_ptr rd;
    TCGv EA;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_lvsr(rd, EA);
    tcg_temp_free(EA);
    tcg_temp_free_ptr(rd);
}

/* mfvscr — VSCR goes to the low word of vD, the rest is zeroed. */
static void gen_mfvscr(DisasContext *ctx)
{
    TCGv_i32 t;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    tcg_gen_movi_i64(cpu_avrh[rD(ctx->opcode)], 0);
    t = tcg_temp_new_i32();
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, vscr));
    tcg_gen_extu_i32_i64(cpu_avrl[rD(ctx->opcode)], t);
    tcg_temp_free_i32(t);
}

/* mtvscr — VSCR is updated via the helper (side effects on FP flags). */
static void gen_mtvscr(DisasContext *ctx)
{
    TCGv_ptr p;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    p = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_mtvscr(p);
    tcg_temp_free_ptr(p);
}
/* Logical operations */
6358
#define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
6359
static void glue(gen_, name)(DisasContext *ctx) \
6361
if (unlikely(!ctx->altivec_enabled)) { \
6362
gen_exception(ctx, POWERPC_EXCP_VPU); \
6365
tcg_op(cpu_avrh[rD(ctx->opcode)], cpu_avrh[rA(ctx->opcode)], cpu_avrh[rB(ctx->opcode)]); \
6366
tcg_op(cpu_avrl[rD(ctx->opcode)], cpu_avrl[rA(ctx->opcode)], cpu_avrl[rB(ctx->opcode)]); \
6369
GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16);
6370
GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17);
6371
GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18);
6372
GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19);
6373
GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20);
6375
/* Generic VX-form (vD, vA, vB) instruction: dispatch to a helper that
 * takes pointers into the AVR file.  */
#define GEN_VXFORM(name, opc2, opc3)                                          \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv_ptr ra, rb, rd;                                                      \
    if (unlikely(!ctx->altivec_enabled)) {                                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                                 \
        return;                                                               \
    }                                                                         \
    ra = gen_avr_ptr(rA(ctx->opcode));                                        \
    rb = gen_avr_ptr(rB(ctx->opcode));                                        \
    rd = gen_avr_ptr(rD(ctx->opcode));                                        \
    gen_helper_##name (rd, ra, rb);                                           \
    tcg_temp_free_ptr(ra);                                                    \
    tcg_temp_free_ptr(rb);                                                    \
    tcg_temp_free_ptr(rd);                                                    \
}
GEN_VXFORM(vaddubm, 0, 0);
GEN_VXFORM(vadduhm, 0, 1);
GEN_VXFORM(vadduwm, 0, 2);
GEN_VXFORM(vsububm, 0, 16);
GEN_VXFORM(vsubuhm, 0, 17);
GEN_VXFORM(vsubuwm, 0, 18);
GEN_VXFORM(vmaxub, 1, 0);
GEN_VXFORM(vmaxuh, 1, 1);
GEN_VXFORM(vmaxuw, 1, 2);
GEN_VXFORM(vmaxsb, 1, 4);
GEN_VXFORM(vmaxsh, 1, 5);
GEN_VXFORM(vmaxsw, 1, 6);
GEN_VXFORM(vminub, 1, 8);
GEN_VXFORM(vminuh, 1, 9);
GEN_VXFORM(vminuw, 1, 10);
GEN_VXFORM(vminsb, 1, 12);
GEN_VXFORM(vminsh, 1, 13);
GEN_VXFORM(vminsw, 1, 14);
GEN_VXFORM(vavgub, 1, 16);
GEN_VXFORM(vavguh, 1, 17);
GEN_VXFORM(vavguw, 1, 18);
GEN_VXFORM(vavgsb, 1, 20);
GEN_VXFORM(vavgsh, 1, 21);
GEN_VXFORM(vavgsw, 1, 22);
GEN_VXFORM(vmrghb, 6, 0);
GEN_VXFORM(vmrghh, 6, 1);
GEN_VXFORM(vmrghw, 6, 2);
GEN_VXFORM(vmrglb, 6, 4);
GEN_VXFORM(vmrglh, 6, 5);
GEN_VXFORM(vmrglw, 6, 6);
GEN_VXFORM(vmuloub, 4, 0);
GEN_VXFORM(vmulouh, 4, 1);
GEN_VXFORM(vmulosb, 4, 4);
GEN_VXFORM(vmulosh, 4, 5);
GEN_VXFORM(vmuleub, 4, 8);
GEN_VXFORM(vmuleuh, 4, 9);
GEN_VXFORM(vmulesb, 4, 12);
GEN_VXFORM(vmulesh, 4, 13);
GEN_VXFORM(vslb, 2, 4);
GEN_VXFORM(vslh, 2, 5);
GEN_VXFORM(vslw, 2, 6);
GEN_VXFORM(vsrb, 2, 8);
GEN_VXFORM(vsrh, 2, 9);
GEN_VXFORM(vsrw, 2, 10);
GEN_VXFORM(vsrab, 2, 12);
GEN_VXFORM(vsrah, 2, 13);
GEN_VXFORM(vsraw, 2, 14);
GEN_VXFORM(vslo, 6, 16);
GEN_VXFORM(vsro, 6, 17);
GEN_VXFORM(vaddcuw, 0, 6);
GEN_VXFORM(vsubcuw, 0, 22);
GEN_VXFORM(vaddubs, 0, 8);
GEN_VXFORM(vadduhs, 0, 9);
GEN_VXFORM(vadduws, 0, 10);
GEN_VXFORM(vaddsbs, 0, 12);
GEN_VXFORM(vaddshs, 0, 13);
GEN_VXFORM(vaddsws, 0, 14);
GEN_VXFORM(vsububs, 0, 24);
GEN_VXFORM(vsubuhs, 0, 25);
GEN_VXFORM(vsubuws, 0, 26);
GEN_VXFORM(vsubsbs, 0, 28);
GEN_VXFORM(vsubshs, 0, 29);
GEN_VXFORM(vsubsws, 0, 30);
GEN_VXFORM(vrlb, 2, 0);
GEN_VXFORM(vrlh, 2, 1);
GEN_VXFORM(vrlw, 2, 2);
GEN_VXFORM(vsl, 2, 7);
GEN_VXFORM(vsr, 2, 11);
GEN_VXFORM(vpkuhum, 7, 0);
GEN_VXFORM(vpkuwum, 7, 1);
GEN_VXFORM(vpkuhus, 7, 2);
GEN_VXFORM(vpkuwus, 7, 3);
GEN_VXFORM(vpkshus, 7, 4);
GEN_VXFORM(vpkswus, 7, 5);
GEN_VXFORM(vpkshss, 7, 6);
GEN_VXFORM(vpkswss, 7, 7);
GEN_VXFORM(vpkpx, 7, 12);
GEN_VXFORM(vsum4ubs, 4, 24);
GEN_VXFORM(vsum4sbs, 4, 28);
GEN_VXFORM(vsum4shs, 4, 25);
GEN_VXFORM(vsum2sws, 4, 26);
GEN_VXFORM(vsumsws, 4, 30);
GEN_VXFORM(vaddfp, 5, 0);
GEN_VXFORM(vsubfp, 5, 1);
GEN_VXFORM(vmaxfp, 5, 16);
GEN_VXFORM(vminfp, 5, 17);
/* VXR-form: like VX-form but also generated in a Rc=1 ("record") variant
 * whose helper additionally updates CR6.  */
#define GEN_VXRFORM1(opname, name, str, opc2, opc3)                           \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv_ptr ra, rb, rd;                                                      \
    if (unlikely(!ctx->altivec_enabled)) {                                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                                 \
        return;                                                               \
    }                                                                         \
    ra = gen_avr_ptr(rA(ctx->opcode));                                        \
    rb = gen_avr_ptr(rB(ctx->opcode));                                        \
    rd = gen_avr_ptr(rD(ctx->opcode));                                        \
    gen_helper_##opname (rd, ra, rb);                                         \
    tcg_temp_free_ptr(ra);                                                    \
    tcg_temp_free_ptr(rb);                                                    \
    tcg_temp_free_ptr(rd);                                                    \
}

#define GEN_VXRFORM(name, opc2, opc3)                                         \
    GEN_VXRFORM1(name, name, #name, opc2, opc3)                               \
    GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
GEN_VXRFORM(vcmpequb, 3, 0)
GEN_VXRFORM(vcmpequh, 3, 1)
GEN_VXRFORM(vcmpequw, 3, 2)
GEN_VXRFORM(vcmpgtsb, 3, 12)
GEN_VXRFORM(vcmpgtsh, 3, 13)
GEN_VXRFORM(vcmpgtsw, 3, 14)
GEN_VXRFORM(vcmpgtub, 3, 8)
GEN_VXRFORM(vcmpgtuh, 3, 9)
GEN_VXRFORM(vcmpgtuw, 3, 10)
GEN_VXRFORM(vcmpeqfp, 3, 3)
GEN_VXRFORM(vcmpgefp, 3, 7)
GEN_VXRFORM(vcmpgtfp, 3, 11)
GEN_VXRFORM(vcmpbfp, 3, 15)
/* VX-form with a 5-bit signed immediate instead of vA/vB (vspltis*).  */
#define GEN_VXFORM_SIMM(name, opc2, opc3)                                     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv_ptr rd;                                                              \
    TCGv_i32 simm;                                                            \
    if (unlikely(!ctx->altivec_enabled)) {                                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                                 \
        return;                                                               \
    }                                                                         \
    simm = tcg_const_i32(SIMM5(ctx->opcode));                                 \
    rd = gen_avr_ptr(rD(ctx->opcode));                                        \
    gen_helper_##name (rd, simm);                                             \
    tcg_temp_free_i32(simm);                                                  \
    tcg_temp_free_ptr(rd);                                                    \
}

GEN_VXFORM_SIMM(vspltisb, 6, 12);
GEN_VXFORM_SIMM(vspltish, 6, 13);
GEN_VXFORM_SIMM(vspltisw, 6, 14);
/* VX-form with no vA operand (unary on vB).  */
#define GEN_VXFORM_NOA(name, opc2, opc3)                                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv_ptr rb, rd;                                                          \
    if (unlikely(!ctx->altivec_enabled)) {                                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                                 \
        return;                                                               \
    }                                                                         \
    rb = gen_avr_ptr(rB(ctx->opcode));                                        \
    rd = gen_avr_ptr(rD(ctx->opcode));                                        \
    gen_helper_##name (rd, rb);                                               \
    tcg_temp_free_ptr(rb);                                                    \
    tcg_temp_free_ptr(rd);                                                    \
}

GEN_VXFORM_NOA(vupkhsb, 7, 8);
GEN_VXFORM_NOA(vupkhsh, 7, 9);
GEN_VXFORM_NOA(vupklsb, 7, 10);
GEN_VXFORM_NOA(vupklsh, 7, 11);
GEN_VXFORM_NOA(vupkhpx, 7, 13);
GEN_VXFORM_NOA(vupklpx, 7, 15);
GEN_VXFORM_NOA(vrefp, 5, 4);
GEN_VXFORM_NOA(vrsqrtefp, 5, 5);
GEN_VXFORM_NOA(vexptefp, 5, 6);
GEN_VXFORM_NOA(vlogefp, 5, 7);
GEN_VXFORM_NOA(vrfim, 5, 8);
GEN_VXFORM_NOA(vrfin, 5, 9);
GEN_VXFORM_NOA(vrfip, 5, 10);
GEN_VXFORM_NOA(vrfiz, 5, 11);
/* NOTE(review): duplicate, token-identical redefinition of GEN_VXFORM_SIMM
 * (already defined above).  An identical redefinition is legal C
 * (C99 6.10.3p2); kept to preserve the original file layout.  */
#define GEN_VXFORM_SIMM(name, opc2, opc3)                                     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv_ptr rd;                                                              \
    TCGv_i32 simm;                                                            \
    if (unlikely(!ctx->altivec_enabled)) {                                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                                 \
        return;                                                               \
    }                                                                         \
    simm = tcg_const_i32(SIMM5(ctx->opcode));                                 \
    rd = gen_avr_ptr(rD(ctx->opcode));                                        \
    gen_helper_##name (rd, simm);                                             \
    tcg_temp_free_i32(simm);                                                  \
    tcg_temp_free_ptr(rd);                                                    \
}
/* VX-form with a 5-bit unsigned immediate and vB operand.  */
#define GEN_VXFORM_UIMM(name, opc2, opc3)                                     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv_ptr rb, rd;                                                          \
    TCGv_i32 uimm;                                                            \
    if (unlikely(!ctx->altivec_enabled)) {                                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                                 \
        return;                                                               \
    }                                                                         \
    uimm = tcg_const_i32(UIMM5(ctx->opcode));                                 \
    rb = gen_avr_ptr(rB(ctx->opcode));                                        \
    rd = gen_avr_ptr(rD(ctx->opcode));                                        \
    gen_helper_##name (rd, rb, uimm);                                         \
    tcg_temp_free_i32(uimm);                                                  \
    tcg_temp_free_ptr(rb);                                                    \
    tcg_temp_free_ptr(rd);                                                    \
}

GEN_VXFORM_UIMM(vspltb, 6, 8);
GEN_VXFORM_UIMM(vsplth, 6, 9);
GEN_VXFORM_UIMM(vspltw, 6, 10);
GEN_VXFORM_UIMM(vcfux, 5, 12);
GEN_VXFORM_UIMM(vcfsx, 5, 13);
GEN_VXFORM_UIMM(vctuxs, 5, 14);
GEN_VXFORM_UIMM(vctsxs, 5, 15);
/* vsldoi: shift the (vA:vB) pair left by the VSH immediate (bytes).  */
static void gen_vsldoi(DisasContext *ctx)
{
    TCGv_ptr ra, rb, rd;
    TCGv_i32 sh;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    ra = gen_avr_ptr(rA(ctx->opcode));
    rb = gen_avr_ptr(rB(ctx->opcode));
    rd = gen_avr_ptr(rD(ctx->opcode));
    sh = tcg_const_i32(VSH(ctx->opcode));
    gen_helper_vsldoi (rd, ra, rb, sh);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);
    tcg_temp_free_ptr(rd);
    tcg_temp_free_i32(sh);
}
#define GEN_VAFORM_PAIRED(name0, name1, opc2) \
6626
static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
6628
TCGv_ptr ra, rb, rc, rd; \
6629
if (unlikely(!ctx->altivec_enabled)) { \
6630
gen_exception(ctx, POWERPC_EXCP_VPU); \
6633
ra = gen_avr_ptr(rA(ctx->opcode)); \
6634
rb = gen_avr_ptr(rB(ctx->opcode)); \
6635
rc = gen_avr_ptr(rC(ctx->opcode)); \
6636
rd = gen_avr_ptr(rD(ctx->opcode)); \
6637
if (Rc(ctx->opcode)) { \
6638
gen_helper_##name1 (rd, ra, rb, rc); \
6640
gen_helper_##name0 (rd, ra, rb, rc); \
6642
tcg_temp_free_ptr(ra); \
6643
tcg_temp_free_ptr(rb); \
6644
tcg_temp_free_ptr(rc); \
6645
tcg_temp_free_ptr(rd); \
6648
GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16)
6650
/* vmladduhm: multiply-low-add on unsigned halfwords (no Rc pairing).  */
static void gen_vmladduhm(DisasContext *ctx)
{
    TCGv_ptr ra, rb, rc, rd;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    ra = gen_avr_ptr(rA(ctx->opcode));
    rb = gen_avr_ptr(rB(ctx->opcode));
    rc = gen_avr_ptr(rC(ctx->opcode));
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_vmladduhm(rd, ra, rb, rc);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);
    tcg_temp_free_ptr(rc);
    tcg_temp_free_ptr(rd);
}
GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18)
GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19)
GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20)
GEN_VAFORM_PAIRED(vsel, vperm, 21)
GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23)
/*** SPE extension ***/
6675
/* Register moves */
6678
static inline void gen_evmra(DisasContext *ctx)
6681
if (unlikely(!ctx->spe_enabled)) {
6682
gen_exception(ctx, POWERPC_EXCP_SPEU);
6686
#if defined(TARGET_PPC64)
6688
tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6691
tcg_gen_st_i64(cpu_gpr[rA(ctx->opcode)],
6693
offsetof(CPUState, spe_acc));
6695
TCGv_i64 tmp = tcg_temp_new_i64();
6697
/* tmp := rA_lo + rA_hi << 32 */
6698
tcg_gen_concat_i32_i64(tmp, cpu_gpr[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6700
/* spe_acc := tmp */
6701
tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));
6702
tcg_temp_free_i64(tmp);
6705
tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6706
tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6710
/* Read a 64-bit SPE GPR value into t (concat of lo/hi halves on 32-bit).  */
static inline void gen_load_gpr64(TCGv_i64 t, int reg)
{
#if defined(TARGET_PPC64)
    tcg_gen_mov_i64(t, cpu_gpr[reg]);
#else
    tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
#endif
}
/* Write a 64-bit value t into SPE GPR reg (split into lo/hi on 32-bit).  */
static inline void gen_store_gpr64(int reg, TCGv_i64 t)
{
#if defined(TARGET_PPC64)
    tcg_gen_mov_i64(cpu_gpr[reg], t);
#else
    TCGv_i64 tmp = tcg_temp_new_i64();
    tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
    tcg_gen_shri_i64(tmp, t, 32);
    tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
    tcg_temp_free_i64(tmp);
#endif
}
#define GEN_SPE(name0, name1, opc2, opc3, inval0, inval1, type) \
6733
static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
6735
if (Rc(ctx->opcode)) \
6741
/* Handler for undefined SPE opcodes */
6742
static inline void gen_speundef(DisasContext *ctx)
6744
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6748
#if defined(TARGET_PPC64)
6749
#define GEN_SPEOP_LOGIC2(name, tcg_op) \
6750
static inline void gen_##name(DisasContext *ctx) \
6752
if (unlikely(!ctx->spe_enabled)) { \
6753
gen_exception(ctx, POWERPC_EXCP_SPEU); \
6756
tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6757
cpu_gpr[rB(ctx->opcode)]); \
6760
#define GEN_SPEOP_LOGIC2(name, tcg_op) \
6761
static inline void gen_##name(DisasContext *ctx) \
6763
if (unlikely(!ctx->spe_enabled)) { \
6764
gen_exception(ctx, POWERPC_EXCP_SPEU); \
6767
tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6768
cpu_gpr[rB(ctx->opcode)]); \
6769
tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6770
cpu_gprh[rB(ctx->opcode)]); \
6774
GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
6775
GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
6776
GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
6777
GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
6778
GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
6779
GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
6780
GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
6781
GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);
6783
/* SPE logic immediate */
6784
#if defined(TARGET_PPC64)
6785
#define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6786
static inline void gen_##name(DisasContext *ctx) \
6788
if (unlikely(!ctx->spe_enabled)) { \
6789
gen_exception(ctx, POWERPC_EXCP_SPEU); \
6792
TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6793
TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6794
TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6795
tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6796
tcg_opi(t0, t0, rB(ctx->opcode)); \
6797
tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6798
tcg_gen_trunc_i64_i32(t1, t2); \
6799
tcg_temp_free_i64(t2); \
6800
tcg_opi(t1, t1, rB(ctx->opcode)); \
6801
tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6802
tcg_temp_free_i32(t0); \
6803
tcg_temp_free_i32(t1); \
6806
#define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6807
static inline void gen_##name(DisasContext *ctx) \
6809
if (unlikely(!ctx->spe_enabled)) { \
6810
gen_exception(ctx, POWERPC_EXCP_SPEU); \
6813
tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6815
tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6819
GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
6820
GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
6821
GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
6822
GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
6824
/* SPE arithmetic */
6825
#if defined(TARGET_PPC64)
6826
#define GEN_SPEOP_ARITH1(name, tcg_op) \
6827
static inline void gen_##name(DisasContext *ctx) \
6829
if (unlikely(!ctx->spe_enabled)) { \
6830
gen_exception(ctx, POWERPC_EXCP_SPEU); \
6833
TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6834
TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6835
TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6836
tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6838
tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6839
tcg_gen_trunc_i64_i32(t1, t2); \
6840
tcg_temp_free_i64(t2); \
6842
tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6843
tcg_temp_free_i32(t0); \
6844
tcg_temp_free_i32(t1); \
6847
#define GEN_SPEOP_ARITH1(name, tcg_op) \
6848
static inline void gen_##name(DisasContext *ctx) \
6850
if (unlikely(!ctx->spe_enabled)) { \
6851
gen_exception(ctx, POWERPC_EXCP_SPEU); \
6854
tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); \
6855
tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); \
6859
static inline void gen_op_evabs(TCGv_i32 ret, TCGv_i32 arg1)
6861
int l1 = gen_new_label();
6862
int l2 = gen_new_label();
6864
tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
6865
tcg_gen_neg_i32(ret, arg1);
6868
tcg_gen_mov_i32(ret, arg1);
6871
GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
6872
GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
6873
GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
6874
GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
6875
static inline void gen_op_evrndw(TCGv_i32 ret, TCGv_i32 arg1)
6877
tcg_gen_addi_i32(ret, arg1, 0x8000);
6878
tcg_gen_ext16u_i32(ret, ret);
6880
GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
6881
GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
6882
GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
6884
/* Binary 32-bit op applied independently to each half of rA/rB.  */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH2(name, tcg_op)                                        \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    TCGv_i32 t0 = tcg_temp_local_new_i32();                                   \
    TCGv_i32 t1 = tcg_temp_local_new_i32();                                   \
    TCGv_i32 t2 = tcg_temp_local_new_i32();                                   \
    TCGv_i64 t3 = tcg_temp_local_new_i64();                                   \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                      \
    tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]);                      \
    tcg_op(t0, t0, t2);                                                       \
    tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32);                       \
    tcg_gen_trunc_i64_i32(t1, t3);                                            \
    tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32);                       \
    tcg_gen_trunc_i64_i32(t2, t3);                                            \
    tcg_temp_free_i64(t3);                                                    \
    tcg_op(t1, t1, t2);                                                       \
    tcg_temp_free_i32(t2);                                                    \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);                 \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
#else
#define GEN_SPEOP_ARITH2(name, tcg_op)                                        \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],              \
           cpu_gprh[rB(ctx->opcode)]);                                        \
}
#endif
/* Per-lane shifts: a shift amount >= 32 yields 0 (per SPE semantics).  */
static inline void gen_op_evsrwu(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    t0 = tcg_temp_local_new_i32();
    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_shr_i32(ret, arg1, t0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i32(ret, 0);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
static inline void gen_op_evsrws(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    t0 = tcg_temp_local_new_i32();
    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_sar_i32(ret, arg1, t0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i32(ret, 0);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
static inline void gen_op_evslw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    t0 = tcg_temp_local_new_i32();
    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_shl_i32(ret, arg1, t0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i32(ret, 0);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
/* Rotate uses only 5 bits of the amount, so no >= 32 special case.  */
static inline void gen_op_evrlw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, arg2, 0x1F);
    tcg_gen_rotl_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
/* evmergehi: rD_hi := rA_hi, rD_lo := rB_hi.  */
static inline void gen_evmergehi(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
    /* BUG FIX: mask had 15 hex digits (0xFFFFFFFF0000000ULL), which kept
     * only bits 60-32 of rA's high word; use the full upper-32-bit mask.  */
    tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
#endif
}
GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
7010
static inline void gen_op_evsubf(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
7012
tcg_gen_sub_i32(ret, arg2, arg1);
7014
GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);
7016
/* SPE arithmetic immediate */
7017
#if defined(TARGET_PPC64)
7018
#define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
7019
static inline void gen_##name(DisasContext *ctx) \
7021
if (unlikely(!ctx->spe_enabled)) { \
7022
gen_exception(ctx, POWERPC_EXCP_SPEU); \
7025
TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7026
TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7027
TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7028
tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7029
tcg_op(t0, t0, rA(ctx->opcode)); \
7030
tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
7031
tcg_gen_trunc_i64_i32(t1, t2); \
7032
tcg_temp_free_i64(t2); \
7033
tcg_op(t1, t1, rA(ctx->opcode)); \
7034
tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
7035
tcg_temp_free_i32(t0); \
7036
tcg_temp_free_i32(t1); \
7039
#define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
7040
static inline void gen_##name(DisasContext *ctx) \
7042
if (unlikely(!ctx->spe_enabled)) { \
7043
gen_exception(ctx, POWERPC_EXCP_SPEU); \
7046
tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
7048
tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)], \
7052
GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
7053
GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);
7055
/* SPE comparison */
7056
#if defined(TARGET_PPC64)
7057
#define GEN_SPEOP_COMP(name, tcg_cond) \
7058
static inline void gen_##name(DisasContext *ctx) \
7060
if (unlikely(!ctx->spe_enabled)) { \
7061
gen_exception(ctx, POWERPC_EXCP_SPEU); \
7064
int l1 = gen_new_label(); \
7065
int l2 = gen_new_label(); \
7066
int l3 = gen_new_label(); \
7067
int l4 = gen_new_label(); \
7068
TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7069
TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7070
TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7071
tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7072
tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7073
tcg_gen_brcond_i32(tcg_cond, t0, t1, l1); \
7074
tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \
7076
gen_set_label(l1); \
7077
tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
7078
CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
7079
gen_set_label(l2); \
7080
tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
7081
tcg_gen_trunc_i64_i32(t0, t2); \
7082
tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
7083
tcg_gen_trunc_i64_i32(t1, t2); \
7084
tcg_temp_free_i64(t2); \
7085
tcg_gen_brcond_i32(tcg_cond, t0, t1, l3); \
7086
tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7087
~(CRF_CH | CRF_CH_AND_CL)); \
7089
gen_set_label(l3); \
7090
tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7091
CRF_CH | CRF_CH_OR_CL); \
7092
gen_set_label(l4); \
7093
tcg_temp_free_i32(t0); \
7094
tcg_temp_free_i32(t1); \
7097
#define GEN_SPEOP_COMP(name, tcg_cond) \
7098
static inline void gen_##name(DisasContext *ctx) \
7100
if (unlikely(!ctx->spe_enabled)) { \
7101
gen_exception(ctx, POWERPC_EXCP_SPEU); \
7104
int l1 = gen_new_label(); \
7105
int l2 = gen_new_label(); \
7106
int l3 = gen_new_label(); \
7107
int l4 = gen_new_label(); \
7109
tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)], \
7110
cpu_gpr[rB(ctx->opcode)], l1); \
7111
tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0); \
7113
gen_set_label(l1); \
7114
tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
7115
CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
7116
gen_set_label(l2); \
7117
tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)], \
7118
cpu_gprh[rB(ctx->opcode)], l3); \
7119
tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7120
~(CRF_CH | CRF_CH_AND_CL)); \
7122
gen_set_label(l3); \
7123
tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7124
CRF_CH | CRF_CH_OR_CL); \
7125
gen_set_label(l4); \
7128
GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
7129
GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
7130
GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
7131
GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
7132
GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
7135
/* brinc: bit-reversed increment (helper does the work).  */
static inline void gen_brinc(DisasContext *ctx)
{
    /* Note: brinc is usable even if SPE is disabled */
    gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
/* evmergelo: rD_hi := rA_lo, rD_lo := rB_lo.  */
static inline void gen_evmergelo(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    /* Write the high half first: safe even when rD aliases rB.  */
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
#endif
}
/* evmergehilo: rD_hi := rA_hi, rD_lo := rB_lo.  */
static inline void gen_evmergehilo(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
    /* BUG FIX: mask had 15 hex digits (0xFFFFFFFF0000000ULL); use the full
     * upper-32-bit mask so all of rA's high word is preserved.  */
    tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
#endif
}
/* evmergelohi: rD_hi := rA_lo, rD_lo := rB_hi.  */
static inline void gen_evmergelohi(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
    tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    if (rD(ctx->opcode) == rA(ctx->opcode)) {
        /* rD aliases rA: save rA_lo before the first write clobbers it.  */
        TCGv_i32 tmp = tcg_temp_new_i32();
        tcg_gen_mov_i32(tmp, cpu_gpr[rA(ctx->opcode)]);
        tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
        tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], tmp);
        tcg_temp_free_i32(tmp);
    } else {
        tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
        tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    }
#endif
}
/* evsplati: splat the sign-extended 5-bit immediate into both halves.  */
static inline void gen_evsplati(DisasContext *ctx)
{
    uint64_t imm = ((int32_t)(rA(ctx->opcode) << 27)) >> 27;

#if defined(TARGET_PPC64)
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
#else
    tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
    tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
#endif
}

/* evsplatfi: splat the immediate left-justified (fractional) into both halves. */
static inline void gen_evsplatfi(DisasContext *ctx)
{
    uint64_t imm = rA(ctx->opcode) << 27;

#if defined(TARGET_PPC64)
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
#else
    tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
    tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
#endif
}
/* evsel: per-half select between rA and rB based on two CR field bits.  */
static inline void gen_evsel(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    int l3 = gen_new_label();
    int l4 = gen_new_label();
    TCGv_i32 t0 = tcg_temp_local_new_i32();
#if defined(TARGET_PPC64)
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
#endif
    /* High half: selected by CR bit 3 of the field.  */
    tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
#endif
    tcg_gen_br(l2);
    gen_set_label(l1);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
#endif
    gen_set_label(l2);
    /* Low half: selected by CR bit 2 of the field.  */
    tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
#if defined(TARGET_PPC64)
    tcg_gen_ext32u_tl(t2, cpu_gpr[rA(ctx->opcode)]);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
#endif
    tcg_gen_br(l4);
    gen_set_label(l3);
#if defined(TARGET_PPC64)
    tcg_gen_ext32u_tl(t2, cpu_gpr[rB(ctx->opcode)]);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
#endif
    gen_set_label(l4);
    tcg_temp_free_i32(t0);
#if defined(TARGET_PPC64)
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
#endif
}
/* The four evsel opcode variants all decode to the same operation.  */
static void gen_evsel0(DisasContext *ctx)
{
    gen_evsel(ctx);
}

static void gen_evsel1(DisasContext *ctx)
{
    gen_evsel(ctx);
}

static void gen_evsel2(DisasContext *ctx)
{
    gen_evsel(ctx);
}

static void gen_evsel3(DisasContext *ctx)
{
    gen_evsel(ctx);
}
/* Multiply */

/* evmwumi: rD := (u32)rA_lo * (u32)rB_lo as a full 64-bit product.  */
static inline void gen_evmwumi(DisasContext *ctx)
{
    TCGv_i64 t0, t1;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();

    /* t0 := rA; t1 := rB */
#if defined(TARGET_PPC64)
    tcg_gen_ext32u_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_tl(t1, cpu_gpr[rB(ctx->opcode)]);
#else
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
#endif

    tcg_gen_mul_i64(t0, t0, t1);  /* t0 := rA * rB */

    gen_store_gpr64(rD(ctx->opcode), t0);  /* rD := t0 */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
/* evmwumia: evmwumi, then copy the product into the accumulator.  */
static inline void gen_evmwumia(DisasContext *ctx)
{
    TCGv_i64 tmp;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    gen_evmwumi(ctx);            /* rD := rA * rB */

    tmp = tcg_temp_new_i64();

    /* acc := rD */
    gen_load_gpr64(tmp, rD(ctx->opcode));
    tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));
    tcg_temp_free_i64(tmp);
}

/* evmwumiaa: evmwumi, then acc := acc + product; rD := acc.  */
static inline void gen_evmwumiaa(DisasContext *ctx)
{
    TCGv_i64 acc;
    TCGv_i64 tmp;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    gen_evmwumi(ctx);           /* rD := rA * rB */

    acc = tcg_temp_new_i64();
    tmp = tcg_temp_new_i64();

    /* tmp := rD */
    gen_load_gpr64(tmp, rD(ctx->opcode));

    /* Load acc */
    tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUState, spe_acc));

    /* acc := tmp + acc */
    tcg_gen_add_i64(acc, acc, tmp);

    /* Store acc */
    tcg_gen_st_i64(acc, cpu_env, offsetof(CPUState, spe_acc));

    /* rD := acc */
    gen_store_gpr64(rD(ctx->opcode), acc);

    tcg_temp_free_i64(acc);
    tcg_temp_free_i64(tmp);
}
/* evmwsmi: rD := (s32)rA_lo * (s32)rB_lo as a full 64-bit product.  */
static inline void gen_evmwsmi(DisasContext *ctx)
{
    TCGv_i64 t0, t1;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();

    /* t0 := rA; t1 := rB */
#if defined(TARGET_PPC64)
    tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
#else
    tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
#endif

    tcg_gen_mul_i64(t0, t0, t1);  /* t0 := rA * rB */

    gen_store_gpr64(rD(ctx->opcode), t0);  /* rD := t0 */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
/* evmwsmia: evmwsmi, then copy the product into the accumulator.  */
static inline void gen_evmwsmia(DisasContext *ctx)
{
    TCGv_i64 tmp;

    gen_evmwsmi(ctx);            /* rD := rA * rB */

    tmp = tcg_temp_new_i64();

    /* acc := rD */
    gen_load_gpr64(tmp, rD(ctx->opcode));
    tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));

    tcg_temp_free_i64(tmp);
}
/* evmwsmiaa: evmwsmi, then acc := acc + product; rD := acc.
 * FIX: the original allocated acc/tmp at declaration AND re-allocated them
 * after gen_evmwsmi(), leaking two TCG temporaries per translation.  */
static inline void gen_evmwsmiaa(DisasContext *ctx)
{
    TCGv_i64 acc = tcg_temp_new_i64();
    TCGv_i64 tmp = tcg_temp_new_i64();

    gen_evmwsmi(ctx);           /* rD := rA * rB */

    /* tmp := rD */
    gen_load_gpr64(tmp, rD(ctx->opcode));

    /* Load acc */
    tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUState, spe_acc));

    /* acc := tmp + acc */
    tcg_gen_add_i64(acc, acc, tmp);

    /* Store acc */
    tcg_gen_st_i64(acc, cpu_env, offsetof(CPUState, spe_acc));

    /* rD := acc */
    gen_store_gpr64(rD(ctx->opcode), acc);

    tcg_temp_free_i64(acc);
    tcg_temp_free_i64(tmp);
}
GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x0000F800, 0x00000000, PPC_SPE); //
GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(speundef, evand, 0x08, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE); ////
GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evorc, 0x0D, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE); ////
GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, 0x0000F800, PPC_SPE); //
GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, 0x0000F800, PPC_SPE);
GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, 0x00600000, PPC_SPE); ////
GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, 0x00600000, PPC_SPE); ////
GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, 0xFFFFFFFF, PPC_SPE); ////
/* SPE load and stores */
7486
static inline void gen_addr_spe_imm_index(DisasContext *ctx, TCGv EA, int sh)
7488
target_ulong uimm = rB(ctx->opcode);
7490
if (rA(ctx->opcode) == 0) {
7491
tcg_gen_movi_tl(EA, uimm << sh);
7493
tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh);
7494
#if defined(TARGET_PPC64)
7495
if (!ctx->sf_mode) {
7496
tcg_gen_ext32u_tl(EA, EA);
7502
/* evldd: load a doubleword into the 64-bit SPE destination.
 * On 32-bit targets the low half goes to the GPR and the high half
 * to the shadow high-word register. */
static inline void gen_op_evldd(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], addr);
#else
    TCGv_i64 t0 = tcg_temp_new_i64();
    gen_qemu_ld64(ctx, t0, addr);
    tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_shri_i64(t0, t0, 32);
    tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
#endif
}
/* evldw: load two words; first word to the upper half of rD,
 * second word to the lower half. */
static inline void gen_op_evldw(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld32u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    gen_addr_add(ctx, addr, addr, 4);
    gen_qemu_ld32u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#else
    gen_qemu_ld32u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 4);
    gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
#endif
}
/* evldh: load four halfwords, packed most-significant first.
 * Fix: in the 32-bit branch the third halfword was shifted into
 * cpu_gprh (clobbering the already-assembled high word) instead of
 * cpu_gpr; it must seed the low-word register. */
static inline void gen_op_evldh(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evlhhesplat: load one halfword and splat it into the even (upper)
 * halfword of each word of rD. */
static inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evlhhousplat: load one halfword zero-extended and splat it into the
 * odd (lower) halfword of each word of rD. */
static inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evlhhossplat: load one halfword sign-extended and splat it into the
 * odd (lower) halfword of each word of rD.  The ext32u in the 64-bit
 * branch keeps the sign bits of the low copy from leaking into the
 * upper word. */
static inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16s(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_ext32u_tl(t0, t0);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evlwhe: load two halfwords into the even (upper) halfword of each
 * word of rD; the odd halfwords are zeroed. */
static inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
#endif
    tcg_temp_free(t0);
}
/* evlwhou: load two halfwords zero-extended, first into the upper word
 * of rD, second into the lower word.
 * Fix: the 64-bit branch had the halfwords swapped (first halfword
 * landed in the low word); the first load must be shifted to bits
 * 63:32, consistent with gen_op_evlwhe above. */
static inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#else
    gen_qemu_ld16u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
#endif
}
/* evlwhos: load two halfwords sign-extended to word width, first into
 * the upper word of rD, second into the lower word.
 * Fix: the 64-bit branch had the halfwords swapped; the first load is
 * shifted to the upper word and the second, masked to 32 bits so its
 * sign extension cannot corrupt the upper word, is OR'd into the low
 * word (consistent with gen_op_evlwhe/evlwhou). */
static inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16s(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16s(ctx, t0, addr);
    tcg_gen_ext32u_tl(t0, t0);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#else
    gen_qemu_ld16s(ctx, cpu_gprh[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16s(ctx, cpu_gpr[rD(ctx->opcode)], addr);
#endif
}
/* evlwwsplat: load one word and splat it into both words of rD. */
static inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld32u(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evlwhsplat: load two halfwords; splat the first into both halves of
 * the upper word of rD and the second into both halves of the lower
 * word.
 * Fix: the final OR in the 32-bit branch read cpu_gprh instead of
 * cpu_gpr, mixing the high-word register into the low word. */
static inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    tcg_gen_shli_tl(t0, t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evstdd: store the 64-bit SPE source as one doubleword. */
static inline void gen_op_evstdd(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], addr);
#else
    TCGv_i64 t0 = tcg_temp_new_i64();
    tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)], cpu_gprh[rS(ctx->opcode)]);
    gen_qemu_st64(ctx, t0, addr);
    tcg_temp_free_i64(t0);
#endif
}
/* evstdw: store the two words of rS, upper word first. */
static inline void gen_op_evstdw(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st32(ctx, t0, addr);
    tcg_temp_free(t0);
#else
    gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
    gen_addr_add(ctx, addr, addr, 4);
    gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
/* evstdh: store the four halfwords of rS, most-significant first. */
static inline void gen_op_evstdh(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
#else
    tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
#endif
    gen_qemu_st16(ctx, t0, addr);
    gen_addr_add(ctx, addr, addr, 2);
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st16(ctx, t0, addr);
#else
    gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
    gen_addr_add(ctx, addr, addr, 2);
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
    gen_qemu_st16(ctx, t0, addr);
    tcg_temp_free(t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
/* evstwhe: store the even (upper) halfword of each word of rS. */
static inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
#else
    tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
#endif
    gen_qemu_st16(ctx, t0, addr);
    gen_addr_add(ctx, addr, addr, 2);
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
    gen_qemu_st16(ctx, t0, addr);
    tcg_temp_free(t0);
}
/* evstwho: store the odd (lower) halfword of each word of rS. */
static inline void gen_op_evstwho(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st16(ctx, t0, addr);
    tcg_temp_free(t0);
#else
    gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
/* evstwwe: store the even (upper) word of rS. */
static inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st32(ctx, t0, addr);
    tcg_temp_free(t0);
#else
    gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
}
/* evstwwo: store the odd (lower) word of rS. */
static inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr)
{
    gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
/* Common wrapper for all SPE loads/stores: raise SPEU if SPE is
 * disabled, compute the EA (immediate-index form when Rc is set,
 * register-index form otherwise) and dispatch to gen_op_<name>.
 * sh is the log2 of the access size used to scale the immediate. */
#define GEN_SPEOP_LDST(name, opc2, sh)                                        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0;                                                                  \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    t0 = tcg_temp_new();                                                      \
    if (Rc(ctx->opcode)) {                                                    \
        gen_addr_spe_imm_index(ctx, t0, sh);                                  \
    } else {                                                                  \
        gen_addr_reg_index(ctx, t0);                                          \
    }                                                                         \
    gen_op_##name(ctx, t0);                                                   \
    tcg_temp_free(t0);                                                        \
}

GEN_SPEOP_LDST(evldd, 0x00, 3);
GEN_SPEOP_LDST(evldw, 0x01, 3);
GEN_SPEOP_LDST(evldh, 0x02, 3);
GEN_SPEOP_LDST(evlhhesplat, 0x04, 1);
GEN_SPEOP_LDST(evlhhousplat, 0x06, 1);
GEN_SPEOP_LDST(evlhhossplat, 0x07, 1);
GEN_SPEOP_LDST(evlwhe, 0x08, 2);
GEN_SPEOP_LDST(evlwhou, 0x0A, 2);
GEN_SPEOP_LDST(evlwhos, 0x0B, 2);
GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2);
GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2);

GEN_SPEOP_LDST(evstdd, 0x10, 3);
GEN_SPEOP_LDST(evstdw, 0x11, 3);
GEN_SPEOP_LDST(evstdh, 0x12, 3);
GEN_SPEOP_LDST(evstwhe, 0x18, 2);
GEN_SPEOP_LDST(evstwho, 0x1A, 2);
GEN_SPEOP_LDST(evstwwe, 0x1C, 2);
GEN_SPEOP_LDST(evstwwo, 0x1E, 2);
/* Multiply and add - TODO */
7842
GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);//
7843
GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7844
GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, 0x00000000, PPC_SPE);
7845
GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7846
GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, 0x00000000, PPC_SPE);
7847
GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7848
GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7849
GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7850
GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, 0x00000000, PPC_SPE);
7851
GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7852
GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, 0x00000000, PPC_SPE);
7853
GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7855
GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7856
GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, 0xFFFFFFFF, PPC_SPE);
7857
GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, 0x00000000, PPC_SPE);
7858
GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7859
GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7860
GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7861
GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7862
GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, 0xFFFFFFFF, PPC_SPE);
7863
GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, 0x00000000, PPC_SPE);
7864
GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7865
GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7866
GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7868
GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
7869
GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
7870
GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
7871
GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
7872
GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, 0x00000000, PPC_SPE);
7874
GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, 0x00000000, PPC_SPE);
7875
GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7876
GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, 0x00000000, PPC_SPE);
7877
GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7878
GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, 0x00000000, PPC_SPE);
7879
GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7880
GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, 0x00000000, PPC_SPE);
7881
GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7882
GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, 0x00000000, PPC_SPE);
7883
GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7884
GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, 0x00000000, PPC_SPE);
7885
GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7887
GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, 0x00000000, PPC_SPE);
7888
GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, 0x00000000, PPC_SPE);
7889
GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7890
GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7892
GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, 0x00000000, PPC_SPE);
7893
GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7894
GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, 0x00000000, PPC_SPE);
7895
GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7896
GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, 0x00000000, PPC_SPE);
7897
GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7898
GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, 0x00000000, PPC_SPE);
7899
GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7900
GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, 0x00000000, PPC_SPE);
7901
GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7902
GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, 0x00000000, PPC_SPE);
7903
GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7905
GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, 0x00000000, PPC_SPE);
7906
GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, 0x00000000, PPC_SPE);
7907
GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7908
GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, 0x00000000, PPC_SPE);
7909
GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7912
/*** SPE floating-point extension ***/
/* Glue macros between the translator and the SPE FP helpers.  Two
 * variants: on 64-bit targets a 64-bit SPE value lives in one GPR and
 * 32-bit results must be merged into its low half; on 32-bit targets
 * the value is split across cpu_gpr/cpu_gprh and is marshalled through
 * gen_load_gpr64/gen_store_gpr64.  CONV_* macros assume the helper
 * itself raises SPEU when needed; ARITH2_* and COMP_* check
 * spe_enabled explicitly. */
#if defined(TARGET_PPC64)
#define GEN_SPEFPUOP_CONV_32_32(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i32 t0;                                                              \
    TCGv t1;                                                                  \
    t0 = tcg_temp_new_i32();                                                  \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]);                       \
    gen_helper_##name(t0, t0);                                                \
    t1 = tcg_temp_new();                                                      \
    tcg_gen_extu_i32_tl(t1, t0);                                              \
    tcg_temp_free_i32(t0);                                                    \
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)],       \
                    0xFFFFFFFF00000000ULL);                                   \
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1);    \
    tcg_temp_free(t1);                                                        \
}
#define GEN_SPEFPUOP_CONV_32_64(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i32 t0;                                                              \
    TCGv t1;                                                                  \
    t0 = tcg_temp_new_i32();                                                  \
    gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]);                          \
    t1 = tcg_temp_new();                                                      \
    tcg_gen_extu_i32_tl(t1, t0);                                              \
    tcg_temp_free_i32(t0);                                                    \
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)],       \
                    0xFFFFFFFF00000000ULL);                                   \
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1);    \
    tcg_temp_free(t1);                                                        \
}
#define GEN_SPEFPUOP_CONV_64_32(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i32 t0 = tcg_temp_new_i32();                                         \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]);                       \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0);                          \
    tcg_temp_free_i32(t0);                                                    \
}
#define GEN_SPEFPUOP_CONV_64_64(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#define GEN_SPEFPUOP_ARITH2_32_32(name)                                       \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i32 t0, t1;                                                          \
    TCGv t2;                                                                  \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i32();                                                  \
    t1 = tcg_temp_new_i32();                                                  \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);                       \
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);                       \
    gen_helper_##name(t0, t0, t1);                                            \
    tcg_temp_free_i32(t1);                                                    \
    t2 = tcg_temp_new();                                                      \
    tcg_gen_extu_i32_tl(t2, t0);                                              \
    tcg_temp_free_i32(t0);                                                    \
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)],       \
                    0xFFFFFFFF00000000ULL);                                   \
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t2);    \
    tcg_temp_free(t2);                                                        \
}
#define GEN_SPEFPUOP_ARITH2_64_64(name)                                       \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],     \
                      cpu_gpr[rB(ctx->opcode)]);                              \
}
#define GEN_SPEFPUOP_COMP_32(name)                                            \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i32 t0, t1;                                                          \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i32();                                                  \
    t1 = tcg_temp_new_i32();                                                  \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);                       \
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);                       \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1);                    \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
#define GEN_SPEFPUOP_COMP_64(name)                                            \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)],                             \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#else
#define GEN_SPEFPUOP_CONV_32_32(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#define GEN_SPEFPUOP_CONV_32_64(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i64 t0 = tcg_temp_new_i64();                                         \
    gen_load_gpr64(t0, rB(ctx->opcode));                                      \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0);                          \
    tcg_temp_free_i64(t0);                                                    \
}
#define GEN_SPEFPUOP_CONV_64_32(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i64 t0 = tcg_temp_new_i64();                                         \
    gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]);                          \
    gen_store_gpr64(rD(ctx->opcode), t0);                                     \
    tcg_temp_free_i64(t0);                                                    \
}
#define GEN_SPEFPUOP_CONV_64_64(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i64 t0 = tcg_temp_new_i64();                                         \
    gen_load_gpr64(t0, rB(ctx->opcode));                                      \
    gen_helper_##name(t0, t0);                                                \
    gen_store_gpr64(rD(ctx->opcode), t0);                                     \
    tcg_temp_free_i64(t0);                                                    \
}
#define GEN_SPEFPUOP_ARITH2_32_32(name)                                       \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)],                               \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#define GEN_SPEFPUOP_ARITH2_64_64(name)                                       \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i64 t0, t1;                                                          \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i64();                                                  \
    t1 = tcg_temp_new_i64();                                                  \
    gen_load_gpr64(t0, rA(ctx->opcode));                                      \
    gen_load_gpr64(t1, rB(ctx->opcode));                                      \
    gen_helper_##name(t0, t0, t1);                                            \
    gen_store_gpr64(rD(ctx->opcode), t0);                                     \
    tcg_temp_free_i64(t0);                                                    \
    tcg_temp_free_i64(t1);                                                    \
}
#define GEN_SPEFPUOP_COMP_32(name)                                            \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)],                             \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#define GEN_SPEFPUOP_COMP_64(name)                                            \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i64 t0, t1;                                                          \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i64();                                                  \
    t1 = tcg_temp_new_i64();                                                  \
    gen_load_gpr64(t0, rA(ctx->opcode));                                      \
    gen_load_gpr64(t1, rB(ctx->opcode));                                      \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1);                    \
    tcg_temp_free_i64(t0);                                                    \
    tcg_temp_free_i64(t1);                                                    \
}
#endif
/* Single precision floating-point vectors operations */
/* Arithmetic */
GEN_SPEFPUOP_ARITH2_64_64(evfsadd);
GEN_SPEFPUOP_ARITH2_64_64(evfssub);
GEN_SPEFPUOP_ARITH2_64_64(evfsmul);
GEN_SPEFPUOP_ARITH2_64_64(evfsdiv);
/* evfsabs: clear the sign bit of both single-precision elements. */
static inline void gen_evfsabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000080000000LL);
#else
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x80000000);
    tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
#endif
}
/* evfsnabs: set the sign bit of both single-precision elements. */
static inline void gen_evfsnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
#else
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
    tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
#endif
}
/* evfsneg: flip the sign bit of both single-precision elements. */
static inline void gen_evfsneg(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
#else
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
    tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
#endif
}
GEN_SPEFPUOP_CONV_64_64(evfscfui);
8151
GEN_SPEFPUOP_CONV_64_64(evfscfsi);
8152
GEN_SPEFPUOP_CONV_64_64(evfscfuf);
8153
GEN_SPEFPUOP_CONV_64_64(evfscfsf);
8154
GEN_SPEFPUOP_CONV_64_64(evfsctui);
8155
GEN_SPEFPUOP_CONV_64_64(evfsctsi);
8156
GEN_SPEFPUOP_CONV_64_64(evfsctuf);
8157
GEN_SPEFPUOP_CONV_64_64(evfsctsf);
8158
GEN_SPEFPUOP_CONV_64_64(evfsctuiz);
8159
GEN_SPEFPUOP_CONV_64_64(evfsctsiz);
8162
GEN_SPEFPUOP_COMP_64(evfscmpgt);
8163
GEN_SPEFPUOP_COMP_64(evfscmplt);
8164
GEN_SPEFPUOP_COMP_64(evfscmpeq);
8165
GEN_SPEFPUOP_COMP_64(evfststgt);
8166
GEN_SPEFPUOP_COMP_64(evfststlt);
8167
GEN_SPEFPUOP_COMP_64(evfststeq);
8169
/* Opcodes definitions */
8170
GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
8171
GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE); //
8172
GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8173
GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
8174
GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
8175
GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8176
GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8177
GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8178
GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8179
GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8180
GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8181
GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8182
GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
8183
GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8185
/* Single precision floating-point operations */
/* Arithmetic */
GEN_SPEFPUOP_ARITH2_32_32(efsadd);
GEN_SPEFPUOP_ARITH2_32_32(efssub);
GEN_SPEFPUOP_ARITH2_32_32(efsmul);
GEN_SPEFPUOP_ARITH2_32_32(efsdiv);
/* efsabs: clear the sign bit of the single-precision value in the low
 * word; the cast keeps the mask from sign-extending on 64-bit hosts'
 * target_long. */
static inline void gen_efsabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], (target_long)~0x80000000LL);
}
/* efsnabs: set the sign bit of the single-precision value. */
static inline void gen_efsnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
}
/* efsneg: flip the sign bit of the single-precision value. */
static inline void gen_efsneg(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
}
GEN_SPEFPUOP_CONV_32_32(efscfui);
8218
GEN_SPEFPUOP_CONV_32_32(efscfsi);
8219
GEN_SPEFPUOP_CONV_32_32(efscfuf);
8220
GEN_SPEFPUOP_CONV_32_32(efscfsf);
8221
GEN_SPEFPUOP_CONV_32_32(efsctui);
8222
GEN_SPEFPUOP_CONV_32_32(efsctsi);
8223
GEN_SPEFPUOP_CONV_32_32(efsctuf);
8224
GEN_SPEFPUOP_CONV_32_32(efsctsf);
8225
GEN_SPEFPUOP_CONV_32_32(efsctuiz);
8226
GEN_SPEFPUOP_CONV_32_32(efsctsiz);
8227
GEN_SPEFPUOP_CONV_32_64(efscfd);
8230
GEN_SPEFPUOP_COMP_32(efscmpgt);
8231
GEN_SPEFPUOP_COMP_32(efscmplt);
8232
GEN_SPEFPUOP_COMP_32(efscmpeq);
8233
GEN_SPEFPUOP_COMP_32(efststgt);
8234
GEN_SPEFPUOP_COMP_32(efststlt);
8235
GEN_SPEFPUOP_COMP_32(efststeq);
8237
/* Opcodes definitions */
8238
GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
8239
GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE); //
8240
GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8241
GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
8242
GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
8243
GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, 0x00180000, PPC_SPE_SINGLE); //
8244
GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8245
GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8246
GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8247
GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8248
GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8249
GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8250
GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
8251
GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8253
/* Double precision floating-point operations */
/* Arithmetic */
GEN_SPEFPUOP_ARITH2_64_64(efdadd);
GEN_SPEFPUOP_ARITH2_64_64(efdsub);
GEN_SPEFPUOP_ARITH2_64_64(efdmul);
GEN_SPEFPUOP_ARITH2_64_64(efddiv);
/* efdabs: clear the sign bit of the double-precision value.  On 32-bit
 * targets the sign lives in the high-word register; the low word is
 * copied through unchanged. */
static inline void gen_efdabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000000000000LL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
#endif
}
/* efdnabs: set the sign bit of the double-precision value. */
static inline void gen_efdnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
#endif
}
/* efdneg: flip the sign bit of the double-precision value. */
static inline void gen_efdneg(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
#endif
}
GEN_SPEFPUOP_CONV_64_32(efdcfui);
8301
GEN_SPEFPUOP_CONV_64_32(efdcfsi);
8302
GEN_SPEFPUOP_CONV_64_32(efdcfuf);
8303
GEN_SPEFPUOP_CONV_64_32(efdcfsf);
8304
GEN_SPEFPUOP_CONV_32_64(efdctui);
8305
GEN_SPEFPUOP_CONV_32_64(efdctsi);
8306
GEN_SPEFPUOP_CONV_32_64(efdctuf);
8307
GEN_SPEFPUOP_CONV_32_64(efdctsf);
8308
GEN_SPEFPUOP_CONV_32_64(efdctuiz);
8309
GEN_SPEFPUOP_CONV_32_64(efdctsiz);
8310
GEN_SPEFPUOP_CONV_64_32(efdcfs);
8311
GEN_SPEFPUOP_CONV_64_64(efdcfuid);
8312
GEN_SPEFPUOP_CONV_64_64(efdcfsid);
8313
GEN_SPEFPUOP_CONV_64_64(efdctuidz);
8314
GEN_SPEFPUOP_CONV_64_64(efdctsidz);
8317
GEN_SPEFPUOP_COMP_64(efdcmpgt);
8318
GEN_SPEFPUOP_COMP_64(efdcmplt);
8319
GEN_SPEFPUOP_COMP_64(efdcmpeq);
8320
GEN_SPEFPUOP_COMP_64(efdtstgt);
8321
GEN_SPEFPUOP_COMP_64(efdtstlt);
8322
GEN_SPEFPUOP_COMP_64(efdtsteq);
8324
/* Opcodes definitions */
8325
GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE); //
8326
GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8327
GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_DOUBLE); //
8328
GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
8329
GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE); //
8330
GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8331
GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE); //
8332
GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, 0x00180000, PPC_SPE_DOUBLE); //
8333
GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8334
GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8335
GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8336
GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8337
GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
8338
GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
8339
GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE); //
8340
GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
8342
static opcode_t opcodes[] = {
8343
GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
8344
GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
8345
GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8346
GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER),
8347
GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8348
GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
8349
GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8350
GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8351
GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8352
GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8353
GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
8354
GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
8355
GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
8356
GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
8357
GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8358
#if defined(TARGET_PPC64)
8359
GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
8361
GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
8362
GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
8363
GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8364
GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8365
GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8366
GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
8367
GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
8368
GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
8369
GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8370
GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8371
GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8372
GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8373
GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB),
8374
GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
8375
#if defined(TARGET_PPC64)
8376
GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
8377
GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
8379
GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8380
GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8381
GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8382
GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
8383
GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
8384
GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
8385
GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
8386
#if defined(TARGET_PPC64)
8387
GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
8388
GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
8389
GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
8390
GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
8391
GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
8393
GEN_HANDLER(frsqrtes, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES),
8394
GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8395
GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8396
GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT),
8397
GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT),
8398
GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT),
8399
GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT),
8400
GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT),
8401
GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT),
8402
GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT),
8403
GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x00010000, PPC_FLOAT),
8404
GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT),
8405
#if defined(TARGET_PPC64)
8406
GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
8407
GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
8408
GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
8410
GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8411
GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8412
GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
8413
GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
8414
GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
8415
GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
8416
GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO),
8417
GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
8418
GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
8419
GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
8420
#if defined(TARGET_PPC64)
8421
GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
8422
GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
8424
GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
8425
GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
8426
GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8427
GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8428
GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
8429
GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
8430
GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
8431
GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
8432
#if defined(TARGET_PPC64)
8433
GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
8434
GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
8436
GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW),
8437
GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
8438
GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8439
#if defined(TARGET_PPC64)
8440
GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
8441
GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
8443
GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
8444
GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
8445
GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
8446
GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
8447
GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
8448
GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
8449
#if defined(TARGET_PPC64)
8450
GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
8452
GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC),
8453
GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC),
8454
GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
8455
GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
8456
GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
8457
GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE),
8458
GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE),
8459
GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03E00001, PPC_CACHE_DCBZ),
8460
GEN_HANDLER2(dcbz_970, "dcbz", 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZT),
8461
GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
8462
GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC),
8463
GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
8464
GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
8465
GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
8466
GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
8467
GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
8468
GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
8469
GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
8470
#if defined(TARGET_PPC64)
8471
GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
8472
GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
8474
GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
8475
GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
8477
GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
8478
GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
8479
GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
8481
GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
8482
GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x03FF0001, PPC_MEM_TLBIE),
8483
GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE),
8484
GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
8485
#if defined(TARGET_PPC64)
8486
GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI),
8487
GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
8489
GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
8490
GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
8491
GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
8492
GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
8493
GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
8494
GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
8495
GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
8496
GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
8497
GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
8498
GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
8499
GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
8500
GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8501
GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
8502
GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
8503
GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
8504
GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
8505
GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
8506
GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
8507
GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
8508
GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8509
GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
8510
GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
8511
GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
8512
GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
8513
GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
8514
GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
8515
GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
8516
GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
8517
GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
8518
GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
8519
GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
8520
GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
8521
GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
8522
GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
8523
GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
8524
GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
8525
GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
8526
GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
8527
GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
8528
GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
8529
GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
8530
GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
8531
GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
8532
GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
8533
GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
8534
GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
8535
GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
8536
GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
8537
GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
8538
GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8539
GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8540
GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
8541
GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
8542
GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8543
GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8544
GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
8545
GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
8546
GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
8547
GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
8548
GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
8549
GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
8550
GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
8551
GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
8552
GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
8553
GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
8554
GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
8555
GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
8556
GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
8557
GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
8558
GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
8559
GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
8560
GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
8561
GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
8562
GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
8563
GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
8564
GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
8565
GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
8566
GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
8567
GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
8568
GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
8569
GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
8570
PPC_NONE, PPC2_BOOKE206),
8571
GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
8572
PPC_NONE, PPC2_BOOKE206),
8573
GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
8574
PPC_NONE, PPC2_BOOKE206),
8575
GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
8576
PPC_NONE, PPC2_BOOKE206),
8577
GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
8578
GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
8579
GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
8580
GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
8581
PPC_BOOKE, PPC2_BOOKE206),
8582
GEN_HANDLER_E(msync, 0x1F, 0x16, 0x12, 0x03FFF801,
8583
PPC_BOOKE, PPC2_BOOKE206),
8584
GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
8585
PPC_BOOKE, PPC2_BOOKE206),
8586
GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
8587
GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
8588
GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
8589
GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
8590
GEN_HANDLER(vsldoi, 0x04, 0x16, 0xFF, 0x00000400, PPC_ALTIVEC),
8591
GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
8592
GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE),
8593
GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE),
8594
GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE),
8595
GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE),
8597
#undef GEN_INT_ARITH_ADD
8598
#undef GEN_INT_ARITH_ADD_CONST
8599
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
8600
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
8601
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
8602
add_ca, compute_ca, compute_ov) \
8603
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
8604
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
8605
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
8606
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
8607
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
8608
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
8609
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
8610
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
8611
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
8612
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
8613
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
8615
#undef GEN_INT_ARITH_DIVW
8616
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
8617
GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
8618
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
8619
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
8620
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
8621
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
8623
#if defined(TARGET_PPC64)
8624
#undef GEN_INT_ARITH_DIVD
8625
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
8626
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8627
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
8628
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
8629
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
8630
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
8632
#undef GEN_INT_ARITH_MUL_HELPER
8633
#define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
8634
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8635
GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
8636
GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
8637
GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
8640
#undef GEN_INT_ARITH_SUBF
8641
#undef GEN_INT_ARITH_SUBF_CONST
8642
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
8643
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
8644
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
8645
add_ca, compute_ca, compute_ov) \
8646
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
8647
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
8648
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
8649
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
8650
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
8651
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
8652
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
8653
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
8654
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
8655
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
8656
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
8660
#define GEN_LOGICAL2(name, tcg_op, opc, type) \
8661
GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
8662
#define GEN_LOGICAL1(name, tcg_op, opc, type) \
8663
GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
8664
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
8665
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
8666
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
8667
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
8668
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
8669
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
8670
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
8671
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
8672
#if defined(TARGET_PPC64)
8673
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
8676
#if defined(TARGET_PPC64)
8679
#define GEN_PPC64_R2(name, opc1, opc2) \
8680
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8681
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8683
#define GEN_PPC64_R4(name, opc1, opc2) \
8684
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8685
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
8687
GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8689
GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
8691
GEN_PPC64_R4(rldicl, 0x1E, 0x00),
8692
GEN_PPC64_R4(rldicr, 0x1E, 0x02),
8693
GEN_PPC64_R4(rldic, 0x1E, 0x04),
8694
GEN_PPC64_R2(rldcl, 0x1E, 0x08),
8695
GEN_PPC64_R2(rldcr, 0x1E, 0x09),
8696
GEN_PPC64_R4(rldimi, 0x1E, 0x06),
8699
#undef _GEN_FLOAT_ACB
8700
#undef GEN_FLOAT_ACB
8701
#undef _GEN_FLOAT_AB
8703
#undef _GEN_FLOAT_AC
8707
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
8708
GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type)
8709
#define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
8710
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type), \
8711
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type)
8712
#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
8713
GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
8714
#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
8715
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
8716
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
8717
#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
8718
GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
8719
#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
8720
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
8721
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
8722
#define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
8723
GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type)
8724
#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
8725
GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type)
8727
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT),
8728
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT),
8729
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT),
8730
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT),
8731
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES),
8732
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE),
8733
_GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL),
8734
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT),
8735
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT),
8736
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT),
8737
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT),
8738
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT),
8739
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT),
8740
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT),
8741
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT),
8742
#if defined(TARGET_PPC64)
8743
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B),
8744
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B),
8745
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B),
8747
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT),
8748
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT),
8749
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT),
8750
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT),
8751
GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT),
8752
GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT),
8753
GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT),
8760
#define GEN_LD(name, ldop, opc, type) \
8761
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8762
#define GEN_LDU(name, ldop, opc, type) \
8763
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8764
#define GEN_LDUX(name, ldop, opc2, opc3, type) \
8765
GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8766
#define GEN_LDX(name, ldop, opc2, opc3, type) \
8767
GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8768
#define GEN_LDS(name, ldop, op, type) \
8769
GEN_LD(name, ldop, op | 0x20, type) \
8770
GEN_LDU(name, ldop, op | 0x21, type) \
8771
GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
8772
GEN_LDX(name, ldop, 0x17, op | 0x00, type)
8774
GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
8775
GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
8776
GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
8777
GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
8778
#if defined(TARGET_PPC64)
8779
GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
8780
GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
8781
GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B)
8782
GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B)
8784
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
8785
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
8792
#define GEN_ST(name, stop, opc, type) \
8793
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8794
#define GEN_STU(name, stop, opc, type) \
8795
GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
8796
#define GEN_STUX(name, stop, opc2, opc3, type) \
8797
GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8798
#define GEN_STX(name, stop, opc2, opc3, type) \
8799
GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8800
#define GEN_STS(name, stop, op, type) \
8801
GEN_ST(name, stop, op | 0x20, type) \
8802
GEN_STU(name, stop, op | 0x21, type) \
8803
GEN_STUX(name, stop, 0x17, op | 0x01, type) \
8804
GEN_STX(name, stop, 0x17, op | 0x00, type)
8806
GEN_STS(stb, st8, 0x06, PPC_INTEGER)
8807
GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
8808
GEN_STS(stw, st32, 0x04, PPC_INTEGER)
8809
#if defined(TARGET_PPC64)
8810
GEN_STUX(std, st64, 0x15, 0x05, PPC_64B)
8811
GEN_STX(std, st64, 0x15, 0x04, PPC_64B)
8813
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
8814
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
8821
#define GEN_LDF(name, ldop, opc, type) \
8822
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8823
#define GEN_LDUF(name, ldop, opc, type) \
8824
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8825
#define GEN_LDUXF(name, ldop, opc, type) \
8826
GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
8827
#define GEN_LDXF(name, ldop, opc2, opc3, type) \
8828
GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8829
#define GEN_LDFS(name, ldop, op, type) \
8830
GEN_LDF(name, ldop, op | 0x20, type) \
8831
GEN_LDUF(name, ldop, op | 0x21, type) \
8832
GEN_LDUXF(name, ldop, op | 0x01, type) \
8833
GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
8835
GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT)
8836
GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT)
8843
#define GEN_STF(name, stop, opc, type) \
8844
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8845
#define GEN_STUF(name, stop, opc, type) \
8846
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8847
#define GEN_STUXF(name, stop, opc, type) \
8848
GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
8849
#define GEN_STXF(name, stop, opc2, opc3, type) \
8850
GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8851
#define GEN_STFS(name, stop, op, type) \
8852
GEN_STF(name, stop, op | 0x20, type) \
8853
GEN_STUF(name, stop, op | 0x21, type) \
8854
GEN_STUXF(name, stop, op | 0x01, type) \
8855
GEN_STXF(name, stop, 0x17, op | 0x00, type)
8857
GEN_STFS(stfd, st64, 0x16, PPC_FLOAT)
8858
GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT)
8859
GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX)
8862
#define GEN_CRLOGIC(name, tcg_op, opc) \
8863
GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
8864
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
8865
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
8866
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
8867
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
8868
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
8869
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
8870
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
8871
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
8873
#undef GEN_MAC_HANDLER
8874
#define GEN_MAC_HANDLER(name, opc2, opc3) \
8875
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
8876
GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
8877
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
8878
GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
8879
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
8880
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
8881
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
8882
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
8883
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
8884
GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
8885
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
8886
GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
8887
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
8888
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
8889
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
8890
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
8891
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
8892
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
8893
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
8894
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
8895
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
8896
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
8897
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
8898
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
8899
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
8900
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
8901
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
8902
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
8903
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
8904
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
8905
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
8906
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
8907
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
8908
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
8909
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
8910
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
8911
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
8912
GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
8913
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
8914
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
8915
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
8916
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
8917
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
8923
#define GEN_VR_LDX(name, opc2, opc3) \
8924
GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8925
#define GEN_VR_STX(name, opc2, opc3) \
8926
GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8927
#define GEN_VR_LVE(name, opc2, opc3) \
8928
GEN_HANDLER(lve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8929
#define GEN_VR_STVE(name, opc2, opc3) \
8930
GEN_HANDLER(stve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8931
GEN_VR_LDX(lvx, 0x07, 0x03),
8932
GEN_VR_LDX(lvxl, 0x07, 0x0B),
8933
GEN_VR_LVE(bx, 0x07, 0x00),
8934
GEN_VR_LVE(hx, 0x07, 0x01),
8935
GEN_VR_LVE(wx, 0x07, 0x02),
8936
GEN_VR_STX(svx, 0x07, 0x07),
8937
GEN_VR_STX(svxl, 0x07, 0x0F),
8938
GEN_VR_STVE(bx, 0x07, 0x04),
8939
GEN_VR_STVE(hx, 0x07, 0x05),
8940
GEN_VR_STVE(wx, 0x07, 0x06),
8942
#undef GEN_VX_LOGICAL
8943
#define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
8944
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
8945
GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16),
8946
GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17),
8947
GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18),
8948
GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19),
8949
GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20),
8952
#define GEN_VXFORM(name, opc2, opc3) \
8953
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
8954
GEN_VXFORM(vaddubm, 0, 0),
8955
GEN_VXFORM(vadduhm, 0, 1),
8956
GEN_VXFORM(vadduwm, 0, 2),
8957
GEN_VXFORM(vsububm, 0, 16),
8958
GEN_VXFORM(vsubuhm, 0, 17),
8959
GEN_VXFORM(vsubuwm, 0, 18),
8960
GEN_VXFORM(vmaxub, 1, 0),
8961
GEN_VXFORM(vmaxuh, 1, 1),
8962
GEN_VXFORM(vmaxuw, 1, 2),
8963
GEN_VXFORM(vmaxsb, 1, 4),
8964
GEN_VXFORM(vmaxsh, 1, 5),
8965
GEN_VXFORM(vmaxsw, 1, 6),
8966
GEN_VXFORM(vminub, 1, 8),
8967
GEN_VXFORM(vminuh, 1, 9),
8968
GEN_VXFORM(vminuw, 1, 10),
8969
GEN_VXFORM(vminsb, 1, 12),
8970
GEN_VXFORM(vminsh, 1, 13),
8971
GEN_VXFORM(vminsw, 1, 14),
8972
GEN_VXFORM(vavgub, 1, 16),
8973
GEN_VXFORM(vavguh, 1, 17),
8974
GEN_VXFORM(vavguw, 1, 18),
8975
GEN_VXFORM(vavgsb, 1, 20),
8976
GEN_VXFORM(vavgsh, 1, 21),
8977
GEN_VXFORM(vavgsw, 1, 22),
8978
GEN_VXFORM(vmrghb, 6, 0),
8979
GEN_VXFORM(vmrghh, 6, 1),
8980
GEN_VXFORM(vmrghw, 6, 2),
8981
GEN_VXFORM(vmrglb, 6, 4),
8982
GEN_VXFORM(vmrglh, 6, 5),
8983
GEN_VXFORM(vmrglw, 6, 6),
8984
GEN_VXFORM(vmuloub, 4, 0),
8985
GEN_VXFORM(vmulouh, 4, 1),
8986
GEN_VXFORM(vmulosb, 4, 4),
8987
GEN_VXFORM(vmulosh, 4, 5),
8988
GEN_VXFORM(vmuleub, 4, 8),
8989
GEN_VXFORM(vmuleuh, 4, 9),
8990
GEN_VXFORM(vmulesb, 4, 12),
8991
GEN_VXFORM(vmulesh, 4, 13),
8992
GEN_VXFORM(vslb, 2, 4),
8993
GEN_VXFORM(vslh, 2, 5),
8994
GEN_VXFORM(vslw, 2, 6),
8995
GEN_VXFORM(vsrb, 2, 8),
8996
GEN_VXFORM(vsrh, 2, 9),
8997
GEN_VXFORM(vsrw, 2, 10),
8998
GEN_VXFORM(vsrab, 2, 12),
8999
GEN_VXFORM(vsrah, 2, 13),
9000
GEN_VXFORM(vsraw, 2, 14),
9001
GEN_VXFORM(vslo, 6, 16),
9002
GEN_VXFORM(vsro, 6, 17),
9003
GEN_VXFORM(vaddcuw, 0, 6),
9004
GEN_VXFORM(vsubcuw, 0, 22),
9005
GEN_VXFORM(vaddubs, 0, 8),
9006
GEN_VXFORM(vadduhs, 0, 9),
9007
GEN_VXFORM(vadduws, 0, 10),
9008
GEN_VXFORM(vaddsbs, 0, 12),
9009
GEN_VXFORM(vaddshs, 0, 13),
9010
GEN_VXFORM(vaddsws, 0, 14),
9011
GEN_VXFORM(vsububs, 0, 24),
9012
GEN_VXFORM(vsubuhs, 0, 25),
9013
GEN_VXFORM(vsubuws, 0, 26),
9014
GEN_VXFORM(vsubsbs, 0, 28),
9015
GEN_VXFORM(vsubshs, 0, 29),
9016
GEN_VXFORM(vsubsws, 0, 30),
9017
GEN_VXFORM(vrlb, 2, 0),
9018
GEN_VXFORM(vrlh, 2, 1),
9019
GEN_VXFORM(vrlw, 2, 2),
9020
GEN_VXFORM(vsl, 2, 7),
9021
GEN_VXFORM(vsr, 2, 11),
9022
GEN_VXFORM(vpkuhum, 7, 0),
9023
GEN_VXFORM(vpkuwum, 7, 1),
9024
GEN_VXFORM(vpkuhus, 7, 2),
9025
GEN_VXFORM(vpkuwus, 7, 3),
9026
GEN_VXFORM(vpkshus, 7, 4),
9027
GEN_VXFORM(vpkswus, 7, 5),
9028
GEN_VXFORM(vpkshss, 7, 6),
9029
GEN_VXFORM(vpkswss, 7, 7),
9030
GEN_VXFORM(vpkpx, 7, 12),
9031
GEN_VXFORM(vsum4ubs, 4, 24),
9032
GEN_VXFORM(vsum4sbs, 4, 28),
9033
GEN_VXFORM(vsum4shs, 4, 25),
9034
GEN_VXFORM(vsum2sws, 4, 26),
9035
GEN_VXFORM(vsumsws, 4, 30),
9036
GEN_VXFORM(vaddfp, 5, 0),
9037
GEN_VXFORM(vsubfp, 5, 1),
9038
GEN_VXFORM(vmaxfp, 5, 16),
9039
GEN_VXFORM(vminfp, 5, 17),
9043
/* VXR-form (Altivec vector compare) table-entry generators.
 * GEN_VXRFORM emits two entries per instruction: the plain form and the
 * Rc=1 "dot" form (opc3 bit 4 set), which also updates CR6.
 * NOTE(review): the bare numeric lines below are extraction artifacts,
 * not part of the original source.
 */
#define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
9044
GEN_HANDLER2(name, str, 0x4, opc2, opc3, 0x00000000, PPC_ALTIVEC),
9045
#define GEN_VXRFORM(name, opc2, opc3) \
9046
GEN_VXRFORM1(name, name, #name, opc2, opc3) \
9047
GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
9048
GEN_VXRFORM(vcmpequb, 3, 0)
9049
GEN_VXRFORM(vcmpequh, 3, 1)
9050
GEN_VXRFORM(vcmpequw, 3, 2)
9051
GEN_VXRFORM(vcmpgtsb, 3, 12)
9052
GEN_VXRFORM(vcmpgtsh, 3, 13)
9053
GEN_VXRFORM(vcmpgtsw, 3, 14)
9054
GEN_VXRFORM(vcmpgtub, 3, 8)
9055
GEN_VXRFORM(vcmpgtuh, 3, 9)
9056
GEN_VXRFORM(vcmpgtuw, 3, 10)
9057
GEN_VXRFORM(vcmpeqfp, 3, 3)
9058
GEN_VXRFORM(vcmpgefp, 3, 7)
9059
GEN_VXRFORM(vcmpgtfp, 3, 11)
9060
GEN_VXRFORM(vcmpbfp, 3, 15)
9062
/* VX-form with signed-immediate operand (vector splat-immediate family);
 * inval mask 0 — every opcode bit is significant for these encodings. */
#undef GEN_VXFORM_SIMM
9063
#define GEN_VXFORM_SIMM(name, opc2, opc3) \
9064
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9065
GEN_VXFORM_SIMM(vspltisb, 6, 12),
9066
GEN_VXFORM_SIMM(vspltish, 6, 13),
9067
GEN_VXFORM_SIMM(vspltisw, 6, 14),
9069
/* VX-form with no rA operand (unpack/round/estimate ops): the inval mask
 * 0x001f0000 rejects encodings with a nonzero (unused) rA field. */
#undef GEN_VXFORM_NOA
9070
#define GEN_VXFORM_NOA(name, opc2, opc3) \
9071
GEN_HANDLER(name, 0x04, opc2, opc3, 0x001f0000, PPC_ALTIVEC)
9072
GEN_VXFORM_NOA(vupkhsb, 7, 8),
9073
GEN_VXFORM_NOA(vupkhsh, 7, 9),
9074
GEN_VXFORM_NOA(vupklsb, 7, 10),
9075
GEN_VXFORM_NOA(vupklsh, 7, 11),
9076
GEN_VXFORM_NOA(vupkhpx, 7, 13),
9077
GEN_VXFORM_NOA(vupklpx, 7, 15),
9078
GEN_VXFORM_NOA(vrefp, 5, 4),
9079
GEN_VXFORM_NOA(vrsqrtefp, 5, 5),
9080
GEN_VXFORM_NOA(vexptefp, 5, 6),
9081
GEN_VXFORM_NOA(vlogefp, 5, 7),
9082
GEN_VXFORM_NOA(vrfim, 5, 8),
9083
GEN_VXFORM_NOA(vrfin, 5, 9),
9084
GEN_VXFORM_NOA(vrfip, 5, 10),
9085
GEN_VXFORM_NOA(vrfiz, 5, 11),
9087
/* VX-form with unsigned-immediate operand (splat element / FP<->int
 * conversions with UIMM scale); no reserved bits (inval mask 0). */
#undef GEN_VXFORM_UIMM
9088
#define GEN_VXFORM_UIMM(name, opc2, opc3) \
9089
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9090
GEN_VXFORM_UIMM(vspltb, 6, 8),
9091
GEN_VXFORM_UIMM(vsplth, 6, 9),
9092
GEN_VXFORM_UIMM(vspltw, 6, 10),
9093
GEN_VXFORM_UIMM(vcfux, 5, 12),
9094
GEN_VXFORM_UIMM(vcfsx, 5, 13),
9095
GEN_VXFORM_UIMM(vctuxs, 5, 14),
9096
GEN_VXFORM_UIMM(vctsxs, 5, 15),
9098
/* VA-form entries: two instructions that share opc2 are registered as a
 * single paired handler; opc3 = 0xFF acts as a wildcard so the pair is
 * dispatched together and distinguished inside the handler. */
#undef GEN_VAFORM_PAIRED
9099
#define GEN_VAFORM_PAIRED(name0, name1, opc2) \
9100
GEN_HANDLER(name0##_##name1, 0x04, opc2, 0xFF, 0x00000000, PPC_ALTIVEC)
9101
GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16),
9102
GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18),
9103
GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19),
9104
GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20),
9105
GEN_VAFORM_PAIRED(vsel, vperm, 21),
9106
GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23),
9109
/* SPE dual-opcode entry: name0/name1 share one slot and are selected at
 * decode time by the Rc bit (the translator picks inval1 vs inval2
 * accordingly — see the Rc(ctx.opcode) check in the decode loop).
 * inval == 0xFFFFFFFF marks an undefined (speundef) half of the pair. */
#define GEN_SPE(name0, name1, opc2, opc3, inval0, inval1, type) \
9110
GEN_OPCODE_DUAL(name0##_##name1, 0x04, opc2, opc3, inval0, inval1, type, PPC_NONE)
9111
GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9112
GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9113
GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9114
GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9115
GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
9116
GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
9117
GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
9118
GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x0000F800, 0x00000000, PPC_SPE),
9119
GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, 0xFFFFFFFF, PPC_SPE),
9120
GEN_SPE(speundef, evand, 0x08, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE),
9121
GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9122
GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9123
GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9124
GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, 0x00000000, PPC_SPE),
9125
GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, 0x00000000, PPC_SPE),
9126
GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, 0x00000000, PPC_SPE),
9127
GEN_SPE(speundef, evorc, 0x0D, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE),
9128
GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9129
GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9130
GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9131
GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9132
GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9133
GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, 0x0000F800, PPC_SPE),
9134
GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, 0x0000F800, PPC_SPE),
9135
GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9136
GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9137
GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, 0x00600000, PPC_SPE),
9138
GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, 0x00600000, PPC_SPE),
9139
GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, 0xFFFFFFFF, PPC_SPE),
9141
GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
9142
GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE),
9143
GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE),
9144
GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
9145
GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
9146
GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9147
GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9148
GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9149
GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9150
GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9151
GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9152
GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9153
GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
9154
GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9156
GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
9157
GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE),
9158
GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE),
9159
GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
9160
GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
9161
GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, 0x00180000, PPC_SPE_SINGLE),
9162
GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9163
GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9164
GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9165
GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9166
GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9167
GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9168
GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
9169
GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9171
GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE),
9172
GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9173
GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_DOUBLE),
9174
GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_DOUBLE),
9175
GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE),
9176
GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9177
GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE),
9178
GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, 0x00180000, PPC_SPE_DOUBLE),
9179
GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9180
GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9181
GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9182
GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9183
GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
9184
GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
9185
GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE),
9186
GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
9188
/* SPE vector load/store entries (opc3 fixed at 0x0C). The `sh` argument
 * is not used in the table entry itself — presumably it is the log2
 * access size consumed by the matching gen_* body; TODO confirm against
 * the generator definition earlier in the file. */
#undef GEN_SPEOP_LDST
9189
#define GEN_SPEOP_LDST(name, opc2, sh) \
9190
GEN_HANDLER(name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE)
9191
GEN_SPEOP_LDST(evldd, 0x00, 3),
9192
GEN_SPEOP_LDST(evldw, 0x01, 3),
9193
GEN_SPEOP_LDST(evldh, 0x02, 3),
9194
GEN_SPEOP_LDST(evlhhesplat, 0x04, 1),
9195
GEN_SPEOP_LDST(evlhhousplat, 0x06, 1),
9196
GEN_SPEOP_LDST(evlhhossplat, 0x07, 1),
9197
GEN_SPEOP_LDST(evlwhe, 0x08, 2),
9198
GEN_SPEOP_LDST(evlwhou, 0x0A, 2),
9199
GEN_SPEOP_LDST(evlwhos, 0x0B, 2),
9200
GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2),
9201
GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2),
9203
GEN_SPEOP_LDST(evstdd, 0x10, 3),
9204
GEN_SPEOP_LDST(evstdw, 0x11, 3),
9205
GEN_SPEOP_LDST(evstdh, 0x12, 3),
9206
GEN_SPEOP_LDST(evstwhe, 0x18, 2),
9207
GEN_SPEOP_LDST(evstwho, 0x1A, 2),
9208
GEN_SPEOP_LDST(evstwwe, 0x1C, 2),
9209
GEN_SPEOP_LDST(evstwwo, 0x1E, 2),
9212
#include "translate_init.c"
9213
#include "helper_regs.h"
9215
/*****************************************************************************/
9216
/* Misc PowerPC helpers */
9217
void cpu_dump_state (CPUState *env, FILE *f, fprintf_function cpu_fprintf,
9225
cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
9226
TARGET_FMT_lx " XER " TARGET_FMT_lx "\n",
9227
env->nip, env->lr, env->ctr, env->xer);
9228
cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
9229
TARGET_FMT_lx " idx %d\n", env->msr, env->spr[SPR_HID0],
9230
env->hflags, env->mmu_idx);
9231
#if !defined(NO_TIMER_DUMP)
9232
cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
9233
#if !defined(CONFIG_USER_ONLY)
9237
cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
9238
#if !defined(CONFIG_USER_ONLY)
9239
, cpu_ppc_load_decr(env)
9243
for (i = 0; i < 32; i++) {
9244
if ((i & (RGPL - 1)) == 0)
9245
cpu_fprintf(f, "GPR%02d", i);
9246
cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
9247
if ((i & (RGPL - 1)) == (RGPL - 1))
9248
cpu_fprintf(f, "\n");
9250
cpu_fprintf(f, "CR ");
9251
for (i = 0; i < 8; i++)
9252
cpu_fprintf(f, "%01x", env->crf[i]);
9253
cpu_fprintf(f, " [");
9254
for (i = 0; i < 8; i++) {
9256
if (env->crf[i] & 0x08)
9258
else if (env->crf[i] & 0x04)
9260
else if (env->crf[i] & 0x02)
9262
cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
9264
cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
9266
for (i = 0; i < 32; i++) {
9267
if ((i & (RFPL - 1)) == 0)
9268
cpu_fprintf(f, "FPR%02d", i);
9269
cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
9270
if ((i & (RFPL - 1)) == (RFPL - 1))
9271
cpu_fprintf(f, "\n");
9273
cpu_fprintf(f, "FPSCR %08x\n", env->fpscr);
9274
#if !defined(CONFIG_USER_ONLY)
9275
cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx
9276
" PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n",
9277
env->spr[SPR_SRR0], env->spr[SPR_SRR1],
9278
env->spr[SPR_PVR], env->spr[SPR_VRSAVE]);
9280
cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx
9281
" SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n",
9282
env->spr[SPR_SPRG0], env->spr[SPR_SPRG1],
9283
env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]);
9285
cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx
9286
" SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n",
9287
env->spr[SPR_SPRG4], env->spr[SPR_SPRG5],
9288
env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]);
9290
if (env->excp_model == POWERPC_EXCP_BOOKE) {
9291
cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx
9292
" MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n",
9293
env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1],
9294
env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]);
9296
cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx
9297
" ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n",
9298
env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR],
9299
env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]);
9301
cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx
9302
" IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n",
9303
env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR],
9304
env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]);
9306
cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx
9307
" EPR " TARGET_FMT_lx "\n",
9308
env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8],
9309
env->spr[SPR_BOOKE_EPR]);
9312
cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx
9313
" PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n",
9314
env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1],
9315
env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]);
9318
* IVORs are left out as they are large and do not change often --
9319
* they can be read with "p $ivor0", "p $ivor1", etc.
9323
#if defined(TARGET_PPC64)
9324
if (env->flags & POWERPC_FLAG_CFAR) {
9325
cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar);
9329
switch (env->mmu_model) {
9330
case POWERPC_MMU_32B:
9331
case POWERPC_MMU_601:
9332
case POWERPC_MMU_SOFT_6xx:
9333
case POWERPC_MMU_SOFT_74xx:
9334
#if defined(TARGET_PPC64)
9335
case POWERPC_MMU_620:
9336
case POWERPC_MMU_64B:
9338
cpu_fprintf(f, " SDR1 " TARGET_FMT_lx "\n", env->spr[SPR_SDR1]);
9340
case POWERPC_MMU_BOOKE206:
9341
cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx
9342
" MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n",
9343
env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1],
9344
env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]);
9346
cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx
9347
" MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n",
9348
env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6],
9349
env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]);
9351
cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx
9352
" TLB1CFG " TARGET_FMT_lx "\n",
9353
env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG],
9354
env->spr[SPR_BOOKE_TLB1CFG]);
9365
void cpu_dump_statistics (CPUState *env, FILE*f, fprintf_function cpu_fprintf,
9368
#if defined(DO_PPC_STATISTICS)
9369
opc_handler_t **t1, **t2, **t3, *handler;
9373
for (op1 = 0; op1 < 64; op1++) {
9375
if (is_indirect_opcode(handler)) {
9376
t2 = ind_table(handler);
9377
for (op2 = 0; op2 < 32; op2++) {
9379
if (is_indirect_opcode(handler)) {
9380
t3 = ind_table(handler);
9381
for (op3 = 0; op3 < 32; op3++) {
9383
if (handler->count == 0)
9385
cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
9386
"%016" PRIx64 " %" PRId64 "\n",
9387
op1, op2, op3, op1, (op3 << 5) | op2,
9389
handler->count, handler->count);
9392
if (handler->count == 0)
9394
cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
9395
"%016" PRIx64 " %" PRId64 "\n",
9396
op1, op2, op1, op2, handler->oname,
9397
handler->count, handler->count);
9401
if (handler->count == 0)
9403
cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
9405
op1, op1, handler->oname,
9406
handler->count, handler->count);
9412
/*****************************************************************************/
9413
static inline void gen_intermediate_code_internal(CPUState *env,
9414
TranslationBlock *tb,
9417
DisasContext ctx, *ctxp = &ctx;
9418
opc_handler_t **table, *handler;
9419
target_ulong pc_start;
9420
uint16_t *gen_opc_end;
9427
gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
9430
ctx.exception = POWERPC_EXCP_NONE;
9431
ctx.spr_cb = env->spr_cb;
9432
ctx.mem_idx = env->mmu_idx;
9433
ctx.access_type = -1;
9434
ctx.le_mode = env->hflags & (1 << MSR_LE) ? 1 : 0;
9435
#if defined(TARGET_PPC64)
9436
ctx.sf_mode = msr_sf;
9437
ctx.has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
9439
ctx.fpu_enabled = msr_fp;
9440
if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
9441
ctx.spe_enabled = msr_spe;
9443
ctx.spe_enabled = 0;
9444
if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
9445
ctx.altivec_enabled = msr_vr;
9447
ctx.altivec_enabled = 0;
9448
if ((env->flags & POWERPC_FLAG_SE) && msr_se)
9449
ctx.singlestep_enabled = CPU_SINGLE_STEP;
9451
ctx.singlestep_enabled = 0;
9452
if ((env->flags & POWERPC_FLAG_BE) && msr_be)
9453
ctx.singlestep_enabled |= CPU_BRANCH_STEP;
9454
if (unlikely(env->singlestep_enabled))
9455
ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
9456
#if defined (DO_SINGLE_STEP) && 0
9457
/* Single step trace mode */
9461
max_insns = tb->cflags & CF_COUNT_MASK;
9463
max_insns = CF_COUNT_MASK;
9466
/* Set env in case of segfault during code fetch */
9467
while (ctx.exception == POWERPC_EXCP_NONE && gen_opc_ptr < gen_opc_end) {
9468
if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
9469
QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
9470
if (bp->pc == ctx.nip) {
9471
gen_debug_exception(ctxp);
9476
if (unlikely(search_pc)) {
9477
j = gen_opc_ptr - gen_opc_buf;
9481
gen_opc_instr_start[lj++] = 0;
9483
gen_opc_pc[lj] = ctx.nip;
9484
gen_opc_instr_start[lj] = 1;
9485
gen_opc_icount[lj] = num_insns;
9487
LOG_DISAS("----------------\n");
9488
LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
9489
ctx.nip, ctx.mem_idx, (int)msr_ir);
9490
if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
9492
if (unlikely(ctx.le_mode)) {
9493
ctx.opcode = bswap32(ldl_code(ctx.nip));
9495
ctx.opcode = ldl_code(ctx.nip);
9497
LOG_DISAS("translate opcode %08x (%02x %02x %02x) (%s)\n",
9498
ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
9499
opc3(ctx.opcode), little_endian ? "little" : "big");
9500
if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
9501
tcg_gen_debug_insn_start(ctx.nip);
9503
table = env->opcodes;
9505
handler = table[opc1(ctx.opcode)];
9506
if (is_indirect_opcode(handler)) {
9507
table = ind_table(handler);
9508
handler = table[opc2(ctx.opcode)];
9509
if (is_indirect_opcode(handler)) {
9510
table = ind_table(handler);
9511
handler = table[opc3(ctx.opcode)];
9514
/* Is opcode *REALLY* valid ? */
9515
if (unlikely(handler->handler == &gen_invalid)) {
9516
if (qemu_log_enabled()) {
9517
qemu_log("invalid/unsupported opcode: "
9518
"%02x - %02x - %02x (%08x) " TARGET_FMT_lx " %d\n",
9519
opc1(ctx.opcode), opc2(ctx.opcode),
9520
opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
9525
if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE) && Rc(ctx.opcode))) {
9526
inval = handler->inval2;
9528
inval = handler->inval1;
9531
if (unlikely((ctx.opcode & inval) != 0)) {
9532
if (qemu_log_enabled()) {
9533
qemu_log("invalid bits: %08x for opcode: "
9534
"%02x - %02x - %02x (%08x) " TARGET_FMT_lx "\n",
9535
ctx.opcode & inval, opc1(ctx.opcode),
9536
opc2(ctx.opcode), opc3(ctx.opcode),
9537
ctx.opcode, ctx.nip - 4);
9539
gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL);
9543
(*(handler->handler))(&ctx);
9544
#if defined(DO_PPC_STATISTICS)
9547
/* Check trace mode exceptions */
9548
if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
9549
(ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
9550
ctx.exception != POWERPC_SYSCALL &&
9551
ctx.exception != POWERPC_EXCP_TRAP &&
9552
ctx.exception != POWERPC_EXCP_BRANCH)) {
9553
gen_exception(ctxp, POWERPC_EXCP_TRACE);
9554
} else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
9555
(env->singlestep_enabled) ||
9557
num_insns >= max_insns)) {
9558
/* if we reach a page boundary or are single stepping, stop
9564
if (tb->cflags & CF_LAST_IO)
9566
if (ctx.exception == POWERPC_EXCP_NONE) {
9567
gen_goto_tb(&ctx, 0, ctx.nip);
9568
} else if (ctx.exception != POWERPC_EXCP_BRANCH) {
9569
if (unlikely(env->singlestep_enabled)) {
9570
gen_debug_exception(ctxp);
9572
/* Generate the return instruction */
9575
gen_icount_end(tb, num_insns);
9576
*gen_opc_ptr = INDEX_op_end;
9577
if (unlikely(search_pc)) {
9578
j = gen_opc_ptr - gen_opc_buf;
9581
gen_opc_instr_start[lj++] = 0;
9583
tb->size = ctx.nip - pc_start;
9584
tb->icount = num_insns;
9586
#if defined(DEBUG_DISAS)
9587
if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
9589
flags = env->bfd_mach;
9590
flags |= ctx.le_mode << 16;
9591
qemu_log("IN: %s\n", lookup_symbol(pc_start));
9592
log_target_disas(pc_start, ctx.nip - pc_start, flags);
9598
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
9600
gen_intermediate_code_internal(env, tb, 0);
9603
void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
9605
gen_intermediate_code_internal(env, tb, 1);
9608
void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
9610
env->nip = gen_opc_pc[pc_pos];