/*
 * PowerPC emulation for qemu: main translation routines.
 *
 * Copyright (c) 2003-2007 Jocelyn Mayer
 * Copyright (C) 2011 Freescale Semiconductor, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
22
#include "disas/disas.h"
24
#include "qemu/host-utils.h"
30
#define CPU_SINGLE_STEP 0x1
31
#define CPU_BRANCH_STEP 0x2
32
#define GDBSTUB_SINGLE_STEP 0x4
34
/* Include definitions for instructions classes and implementations flags */
35
//#define PPC_DEBUG_DISAS
36
//#define DO_PPC_STATISTICS
38
/* Disassembly trace macro: compiled out unless PPC_DEBUG_DISAS is set.
   Restored the #else/#endif that the extraction dropped. */
#ifdef PPC_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
43
/*****************************************************************************/
44
/* Code translation helpers */
46
/* global register indexes */
47
static TCGv_ptr cpu_env;
48
static char cpu_reg_names[10*3 + 22*4 /* GPR */
49
#if !defined(TARGET_PPC64)
50
+ 10*4 + 22*5 /* SPE GPRh */
52
+ 10*4 + 22*5 /* FPR */
53
+ 2*(10*6 + 22*7) /* AVRh, AVRl */
55
static TCGv cpu_gpr[32];
56
#if !defined(TARGET_PPC64)
57
static TCGv cpu_gprh[32];
59
static TCGv_i64 cpu_fpr[32];
60
static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
61
static TCGv_i32 cpu_crf[8];
66
#if defined(TARGET_PPC64)
69
static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca;
70
static TCGv cpu_reserve;
71
static TCGv cpu_fpscr;
72
static TCGv_i32 cpu_access_type;
74
#include "exec/gen-icount.h"
76
void ppc_translate_init(void)
80
size_t cpu_reg_names_size;
81
static int done_init = 0;
86
cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
89
cpu_reg_names_size = sizeof(cpu_reg_names);
91
for (i = 0; i < 8; i++) {
92
snprintf(p, cpu_reg_names_size, "crf%d", i);
93
cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
94
offsetof(CPUPPCState, crf[i]), p);
96
cpu_reg_names_size -= 5;
99
for (i = 0; i < 32; i++) {
100
snprintf(p, cpu_reg_names_size, "r%d", i);
101
cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
102
offsetof(CPUPPCState, gpr[i]), p);
103
p += (i < 10) ? 3 : 4;
104
cpu_reg_names_size -= (i < 10) ? 3 : 4;
105
#if !defined(TARGET_PPC64)
106
snprintf(p, cpu_reg_names_size, "r%dH", i);
107
cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
108
offsetof(CPUPPCState, gprh[i]), p);
109
p += (i < 10) ? 4 : 5;
110
cpu_reg_names_size -= (i < 10) ? 4 : 5;
113
snprintf(p, cpu_reg_names_size, "fp%d", i);
114
cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
115
offsetof(CPUPPCState, fpr[i]), p);
116
p += (i < 10) ? 4 : 5;
117
cpu_reg_names_size -= (i < 10) ? 4 : 5;
119
snprintf(p, cpu_reg_names_size, "avr%dH", i);
120
#ifdef HOST_WORDS_BIGENDIAN
121
cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
122
offsetof(CPUPPCState, avr[i].u64[0]), p);
124
cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
125
offsetof(CPUPPCState, avr[i].u64[1]), p);
127
p += (i < 10) ? 6 : 7;
128
cpu_reg_names_size -= (i < 10) ? 6 : 7;
130
snprintf(p, cpu_reg_names_size, "avr%dL", i);
131
#ifdef HOST_WORDS_BIGENDIAN
132
cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
133
offsetof(CPUPPCState, avr[i].u64[1]), p);
135
cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
136
offsetof(CPUPPCState, avr[i].u64[0]), p);
138
p += (i < 10) ? 6 : 7;
139
cpu_reg_names_size -= (i < 10) ? 6 : 7;
142
cpu_nip = tcg_global_mem_new(TCG_AREG0,
143
offsetof(CPUPPCState, nip), "nip");
145
cpu_msr = tcg_global_mem_new(TCG_AREG0,
146
offsetof(CPUPPCState, msr), "msr");
148
cpu_ctr = tcg_global_mem_new(TCG_AREG0,
149
offsetof(CPUPPCState, ctr), "ctr");
151
cpu_lr = tcg_global_mem_new(TCG_AREG0,
152
offsetof(CPUPPCState, lr), "lr");
154
#if defined(TARGET_PPC64)
155
cpu_cfar = tcg_global_mem_new(TCG_AREG0,
156
offsetof(CPUPPCState, cfar), "cfar");
159
cpu_xer = tcg_global_mem_new(TCG_AREG0,
160
offsetof(CPUPPCState, xer), "xer");
161
cpu_so = tcg_global_mem_new(TCG_AREG0,
162
offsetof(CPUPPCState, so), "SO");
163
cpu_ov = tcg_global_mem_new(TCG_AREG0,
164
offsetof(CPUPPCState, ov), "OV");
165
cpu_ca = tcg_global_mem_new(TCG_AREG0,
166
offsetof(CPUPPCState, ca), "CA");
168
cpu_reserve = tcg_global_mem_new(TCG_AREG0,
169
offsetof(CPUPPCState, reserve_addr),
172
cpu_fpscr = tcg_global_mem_new(TCG_AREG0,
173
offsetof(CPUPPCState, fpscr), "fpscr");
175
cpu_access_type = tcg_global_mem_new_i32(TCG_AREG0,
176
offsetof(CPUPPCState, access_type), "access_type");
178
/* register helpers */
185
/* internal defines */
186
typedef struct DisasContext {
187
struct TranslationBlock *tb;
191
/* Routine used to access memory */
194
/* Translation flags */
196
#if defined(TARGET_PPC64)
203
ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
204
int singlestep_enabled;
205
uint64_t insns_flags;
206
uint64_t insns_flags2;
209
/* True when active word size < size of target_long. */
211
# define NARROW_MODE(C) (!(C)->sf_mode)
213
# define NARROW_MODE(C) 0
216
struct opc_handler_t {
217
/* invalid bits for instruction 1 (Rc(opcode) == 0) */
219
/* invalid bits for instruction 2 (Rc(opcode) == 1) */
221
/* instruction type */
223
/* extended instruction type */
226
void (*handler)(DisasContext *ctx);
227
#if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
230
#if defined(DO_PPC_STATISTICS)
235
static inline void gen_reset_fpstatus(void)
237
gen_helper_reset_fpstatus(cpu_env);
240
static inline void gen_compute_fprf(TCGv_i64 arg, int set_fprf, int set_rc)
242
TCGv_i32 t0 = tcg_temp_new_i32();
245
/* This case might be optimized later */
246
tcg_gen_movi_i32(t0, 1);
247
gen_helper_compute_fprf(t0, cpu_env, arg, t0);
248
if (unlikely(set_rc)) {
249
tcg_gen_mov_i32(cpu_crf[1], t0);
251
gen_helper_float_check_status(cpu_env);
252
} else if (unlikely(set_rc)) {
253
/* We always need to compute fpcc */
254
tcg_gen_movi_i32(t0, 0);
255
gen_helper_compute_fprf(t0, cpu_env, arg, t0);
256
tcg_gen_mov_i32(cpu_crf[1], t0);
259
tcg_temp_free_i32(t0);
262
/* Record the pending memory access type in env, but only when it differs
   from the last one emitted in this TB (avoids redundant stores). */
static inline void gen_set_access_type(DisasContext *ctx, int access_type)
{
    if (ctx->access_type != access_type) {
        tcg_gen_movi_i32(cpu_access_type, access_type);
        ctx->access_type = access_type;
    }
}
270
/* Store the given instruction address into cpu_nip; in 32-bit (narrow)
   mode the address is truncated to 32 bits first. */
static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_nip, nip);
}
278
static inline void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
281
if (ctx->exception == POWERPC_EXCP_NONE) {
282
gen_update_nip(ctx, ctx->nip);
284
t0 = tcg_const_i32(excp);
285
t1 = tcg_const_i32(error);
286
gen_helper_raise_exception_err(cpu_env, t0, t1);
287
tcg_temp_free_i32(t0);
288
tcg_temp_free_i32(t1);
289
ctx->exception = (excp);
292
static inline void gen_exception(DisasContext *ctx, uint32_t excp)
295
if (ctx->exception == POWERPC_EXCP_NONE) {
296
gen_update_nip(ctx, ctx->nip);
298
t0 = tcg_const_i32(excp);
299
gen_helper_raise_exception(cpu_env, t0);
300
tcg_temp_free_i32(t0);
301
ctx->exception = (excp);
304
/* Raise an EXCP_DEBUG exception (gdbstub single-step / breakpoint).
   nip is already up to date after a branch or sync exception. */
static inline void gen_debug_exception(DisasContext *ctx)
{
    TCGv_i32 t0;

    if ((ctx->exception != POWERPC_EXCP_BRANCH) &&
        (ctx->exception != POWERPC_EXCP_SYNC)) {
        gen_update_nip(ctx, ctx->nip);
    }
    t0 = tcg_const_i32(EXCP_DEBUG);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
}
317
/* Raise a program exception for an invalid instruction form. */
static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_INVAL | error);
}
322
/* Stop translation */
323
/* Stop translation after the current instruction. */
static inline void gen_stop_exception(DisasContext *ctx)
{
    gen_update_nip(ctx, ctx->nip);
    ctx->exception = POWERPC_EXCP_STOP;
}
329
/* No need to update nip here, as execution flow will change */
330
/* Stop translation; no nip update needed as execution flow will change. */
static inline void gen_sync_exception(DisasContext *ctx)
{
    ctx->exception = POWERPC_EXCP_SYNC;
}
335
/* Convenience wrappers around GEN_OPCODE/GEN_OPCODE2 for opcode table
   entries without a secondary type (PPC_NONE).  The continuation
   backslashes are restored so each macro spans exactly two lines. */
#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
347
typedef struct opcode_t {
348
unsigned char opc1, opc2, opc3;
349
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
350
unsigned char pad[5];
352
unsigned char pad[1];
354
opc_handler_t handler;
358
/*****************************************************************************/
359
/*** Instruction decoding ***/
360
/* Generate a static inline accessor extracting an unsigned bit-field
   of 'nb' bits at 'shift' from a 32-bit opcode. */
#define EXTRACT_HELPER(name, shift, nb)                                       \
static inline uint32_t name(uint32_t opcode)                                  \
{                                                                             \
    return (opcode >> (shift)) & ((1 << (nb)) - 1);                           \
}

/* Same, but the extracted field is sign-extended from 16 bits. */
#define EXTRACT_SHELPER(name, shift, nb)                                      \
static inline int32_t name(uint32_t opcode)                                   \
{                                                                             \
    return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1));                \
}
373
EXTRACT_HELPER(opc1, 26, 6);
375
EXTRACT_HELPER(opc2, 1, 5);
377
EXTRACT_HELPER(opc3, 6, 5);
378
/* Update Cr0 flags */
379
EXTRACT_HELPER(Rc, 0, 1);
381
EXTRACT_HELPER(rD, 21, 5);
383
EXTRACT_HELPER(rS, 21, 5);
385
EXTRACT_HELPER(rA, 16, 5);
387
EXTRACT_HELPER(rB, 11, 5);
389
EXTRACT_HELPER(rC, 6, 5);
391
EXTRACT_HELPER(crfD, 23, 3);
392
EXTRACT_HELPER(crfS, 18, 3);
393
EXTRACT_HELPER(crbD, 21, 5);
394
EXTRACT_HELPER(crbA, 16, 5);
395
EXTRACT_HELPER(crbB, 11, 5);
397
EXTRACT_HELPER(_SPR, 11, 10);
398
static inline uint32_t SPR(uint32_t opcode)
400
uint32_t sprn = _SPR(opcode);
402
return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
404
/*** Get constants ***/
405
EXTRACT_HELPER(IMM, 12, 8);
406
/* 16 bits signed immediate value */
407
EXTRACT_SHELPER(SIMM, 0, 16);
408
/* 16 bits unsigned immediate value */
409
EXTRACT_HELPER(UIMM, 0, 16);
410
/* 5 bits signed immediate value */
411
EXTRACT_HELPER(SIMM5, 16, 5);
412
/* 5 bits signed immediate value */
413
EXTRACT_HELPER(UIMM5, 16, 5);
415
EXTRACT_HELPER(NB, 11, 5);
417
EXTRACT_HELPER(SH, 11, 5);
418
/* Vector shift count */
419
EXTRACT_HELPER(VSH, 6, 4);
421
EXTRACT_HELPER(MB, 6, 5);
423
EXTRACT_HELPER(ME, 1, 5);
425
EXTRACT_HELPER(TO, 21, 5);
427
EXTRACT_HELPER(CRM, 12, 8);
428
EXTRACT_HELPER(SR, 16, 4);
431
EXTRACT_HELPER(FPBF, 19, 3);
432
EXTRACT_HELPER(FPIMM, 12, 4);
433
EXTRACT_HELPER(FPL, 21, 1);
434
EXTRACT_HELPER(FPFLM, 17, 8);
435
EXTRACT_HELPER(FPW, 16, 1);
437
/*** Jump target decoding ***/
439
EXTRACT_SHELPER(d, 0, 16);
440
/* Immediate address */
441
/* Extract the 26-bit branch displacement (word-aligned, low 2 bits 0). */
static inline target_ulong LI(uint32_t opcode)
{
    return (opcode >> 0) & 0x03FFFFFC;
}
446
/* Extract the 16-bit conditional-branch displacement (word-aligned). */
static inline uint32_t BD(uint32_t opcode)
{
    return (opcode >> 0) & 0xFFFC;
}
451
EXTRACT_HELPER(BO, 21, 5);
452
EXTRACT_HELPER(BI, 16, 5);
453
/* Absolute/relative address */
454
EXTRACT_HELPER(AA, 1, 1);
456
EXTRACT_HELPER(LK, 0, 1);
458
/* Create a mask between <start> and <end> bits */
459
/* Create a mask between <start> and <end> bits, IBM bit numbering.
   Note the unusual if/else chain spanning the #endif: the generic
   two-shift computation is the shared 'else' arm of both variants.
   A wrapping mask (start > end) is the complement of the non-wrapping
   one. */
static inline target_ulong MASK(uint32_t start, uint32_t end)
{
    target_ulong ret;

#if defined(TARGET_PPC64)
    if (likely(start == 0)) {
        ret = UINT64_MAX << (63 - end);
    } else if (likely(end == 63)) {
        ret = UINT64_MAX >> start;
    }
#else
    if (likely(start == 0)) {
        ret = UINT32_MAX << (31 - end);
    } else if (likely(end == 31)) {
        ret = UINT32_MAX >> start;
    }
#endif
    else {
        ret = (((target_ulong)(-1ULL)) >> (start)) ^
            (((target_ulong)(-1ULL) >> (end)) >> 1);
        if (unlikely(start > end))
            return ~ret;
    }

    return ret;
}
486
/*****************************************************************************/
487
/* PowerPC instructions table */
489
#if defined(DO_PPC_STATISTICS)
490
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
500
.handler = &gen_##name, \
501
.oname = stringify(name), \
503
.oname = stringify(name), \
505
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
516
.handler = &gen_##name, \
517
.oname = stringify(name), \
519
.oname = stringify(name), \
521
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
531
.handler = &gen_##name, \
537
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
547
.handler = &gen_##name, \
549
.oname = stringify(name), \
551
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
562
.handler = &gen_##name, \
564
.oname = stringify(name), \
566
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
576
.handler = &gen_##name, \
582
/* SPR load/store helpers */
583
/* Load SPR 'reg' from env into TCG value 't'. */
static inline void gen_load_spr(TCGv t, int reg)
{
    tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}
588
/* Store TCG value 't' into SPR 'reg' in env. */
static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}
593
/* Invalid instruction */
594
/* Handler for any opcode with no valid implementation. */
static void gen_invalid(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}
599
static opc_handler_t invalid_handler = {
600
.inval1 = 0xFFFFFFFF,
601
.inval2 = 0xFFFFFFFF,
604
.handler = gen_invalid,
607
/*** Integer comparison ***/
609
/* Compare arg0 with arg1 ('s' selects signed vs unsigned) and set CR
   field 'crf' with LT/GT/EQ plus the current SO bit.  The missing
   tcg_temp_free(t0) at the end is restored (temp leak otherwise). */
static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0 = tcg_temp_new();
    TCGv_i32 t1 = tcg_temp_new_i32();

    /* Seed the CR field with SO (bit 0). */
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);

    tcg_gen_setcond_tl((s ? TCG_COND_LT: TCG_COND_LTU), t0, arg0, arg1);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_gen_shli_i32(t1, t1, CRF_LT);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);

    tcg_gen_setcond_tl((s ? TCG_COND_GT: TCG_COND_GTU), t0, arg0, arg1);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_gen_shli_i32(t1, t1, CRF_GT);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);

    tcg_gen_setcond_tl(TCG_COND_EQ, t0, arg0, arg1);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_gen_shli_i32(t1, t1, CRF_EQ);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);

    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
635
/* Compare arg0 with an immediate; restores the dropped temp free. */
static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}
642
/* 32-bit compare: extend both operands to the full register width
   (sign- or zero-extended depending on 's') before comparing.  The
   temp declarations and if/else structure were dropped by the
   extraction and are restored here. */
static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
659
/* 32-bit compare against an immediate. */
static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}
666
/* Record-form helper: signed compare of 'reg' against 0 into CR0. */
static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}
676
/* cmp: signed compare; the L bit selects 64-bit vs 32-bit semantics. */
static void gen_cmp(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   1, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     1, crfD(ctx->opcode));
    }
}
688
/* cmpi: signed compare with a sign-extended 16-bit immediate. */
static void gen_cmpi(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                    1, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                      1, crfD(ctx->opcode));
    }
}
700
/* cmpl: unsigned (logical) register compare. */
static void gen_cmpl(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   0, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     0, crfD(ctx->opcode));
    }
}
712
/* cmpli: unsigned compare with a zero-extended 16-bit immediate. */
static void gen_cmpli(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                    0, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                      0, crfD(ctx->opcode));
    }
}
723
/* isel (PowerPC 2.03 specification) */
724
static void gen_isel(DisasContext *ctx)
727
uint32_t bi = rC(ctx->opcode);
731
l1 = gen_new_label();
732
l2 = gen_new_label();
734
mask = 1 << (3 - (bi & 0x03));
735
t0 = tcg_temp_new_i32();
736
tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
737
tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
738
if (rA(ctx->opcode) == 0)
739
tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
741
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
744
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
746
tcg_temp_free_i32(t0);
749
/* cmpb: PowerPC 2.05 specification */
750
/* cmpb: byte-wise compare (PowerPC 2.05), delegated to a helper. */
static void gen_cmpb(DisasContext *ctx)
{
    gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
}
756
/*** Integer arithmetic ***/
758
/* Compute XER.OV for arg0 = arg1 +/- arg2 ('sub' selects subtraction):
   overflow occurred iff the result sign differs from both operand
   signs (after adjusting for subtraction).  OV is folded into SO.
   The if/else around the and/andc and the final temp free are
   restored from the garbled extraction. */
static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
                                           TCGv arg1, TCGv arg2, int sub)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_xor_tl(cpu_ov, arg0, arg2);
    tcg_gen_xor_tl(t0, arg1, arg2);
    if (sub) {
        tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
    } else {
        tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
    }
    tcg_temp_free(t0);
    if (NARROW_MODE(ctx)) {
        tcg_gen_ext32s_tl(cpu_ov, cpu_ov);
    }
    tcg_gen_shri_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1);
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
}
778
/* Common add function */
779
static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
780
TCGv arg2, bool add_ca, bool compute_ca,
781
bool compute_ov, bool compute_rc0)
785
if (compute_ca || compute_ov) {
790
if (NARROW_MODE(ctx)) {
791
/* Caution: a non-obvious corner case of the spec is that we
792
must produce the *entire* 64-bit addition, but produce the
793
carry into bit 32. */
794
TCGv t1 = tcg_temp_new();
795
tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */
796
tcg_gen_add_tl(t0, arg1, arg2);
798
tcg_gen_add_tl(t0, t0, cpu_ca);
800
tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */
802
tcg_gen_shri_tl(cpu_ca, cpu_ca, 32); /* extract bit 32 */
803
tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
805
TCGv zero = tcg_const_tl(0);
807
tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, cpu_ca, zero);
808
tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, arg2, zero);
810
tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, arg2, zero);
815
tcg_gen_add_tl(t0, arg1, arg2);
817
tcg_gen_add_tl(t0, t0, cpu_ca);
822
gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
824
if (unlikely(compute_rc0)) {
825
gen_set_Rc0(ctx, t0);
828
if (!TCGV_EQUAL(t0, ret)) {
829
tcg_gen_mov_tl(ret, t0);
833
/* Add functions with two operands */
834
/* Add functions with two register operands. */
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov)         \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
}
/* Add functions with one register operand and one constant. */
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val,                        \
                                add_ca, compute_ca, compute_ov)               \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], t0,                            \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
    tcg_temp_free(t0);                                                        \
}
853
/* add add. addo addo. */
854
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
855
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
856
/* addc addc. addco addco. */
857
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
858
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
859
/* adde adde. addeo addeo. */
860
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
861
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
862
/* addme addme. addmeo addmeo. */
863
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
864
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
865
/* addze addze. addzeo addzeo.*/
866
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
867
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
869
/* addi: rA == 0 means load-immediate (li), not add with r0. */
static void gen_addi(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);

    if (rA(ctx->opcode) == 0) {
        /* li case */
        tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
    } else {
        tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], simm);
    }
}
882
/* Common body for addic/addic.: add immediate and set CA. */
static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                     c, 0, 1, 0, compute_rc0);
    tcg_temp_free(c);
}
890
/* addic */
static void gen_addic(DisasContext *ctx)
{
    gen_op_addic(ctx, 0);
}
895
/* addic. (record form) */
static void gen_addic_(DisasContext *ctx)
{
    gen_op_addic(ctx, 1);
}
901
/* addis: rA == 0 means load-shifted-immediate (lis). */
static void gen_addis(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);

    if (rA(ctx->opcode) == 0) {
        /* lis case */
        tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
    } else {
        tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], simm << 16);
    }
}
914
static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
915
TCGv arg2, int sign, int compute_ov)
917
int l1 = gen_new_label();
918
int l2 = gen_new_label();
919
TCGv_i32 t0 = tcg_temp_local_new_i32();
920
TCGv_i32 t1 = tcg_temp_local_new_i32();
922
tcg_gen_trunc_tl_i32(t0, arg1);
923
tcg_gen_trunc_tl_i32(t1, arg2);
924
tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1);
926
int l3 = gen_new_label();
927
tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3);
928
tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1);
930
tcg_gen_div_i32(t0, t0, t1);
932
tcg_gen_divu_i32(t0, t0, t1);
935
tcg_gen_movi_tl(cpu_ov, 0);
940
tcg_gen_sari_i32(t0, t0, 31);
942
tcg_gen_movi_i32(t0, 0);
945
tcg_gen_movi_tl(cpu_ov, 1);
946
tcg_gen_movi_tl(cpu_so, 1);
949
tcg_gen_extu_i32_tl(ret, t0);
950
tcg_temp_free_i32(t0);
951
tcg_temp_free_i32(t1);
952
if (unlikely(Rc(ctx->opcode) != 0))
953
gen_set_Rc0(ctx, ret);
956
/* 32-bit divide wrapper; trailing "sign, compute_ov" line restored. */
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
963
/* divwu divwu. divwuo divwuo. */
964
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
965
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
966
/* divw divw. divwo divwo. */
967
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
968
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
969
#if defined(TARGET_PPC64)
970
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
971
TCGv arg2, int sign, int compute_ov)
973
int l1 = gen_new_label();
974
int l2 = gen_new_label();
976
tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1);
978
int l3 = gen_new_label();
979
tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3);
980
tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1);
982
tcg_gen_div_i64(ret, arg1, arg2);
984
tcg_gen_divu_i64(ret, arg1, arg2);
987
tcg_gen_movi_tl(cpu_ov, 0);
992
tcg_gen_sari_i64(ret, arg1, 63);
994
tcg_gen_movi_i64(ret, 0);
997
tcg_gen_movi_tl(cpu_ov, 1);
998
tcg_gen_movi_tl(cpu_so, 1);
1001
if (unlikely(Rc(ctx->opcode) != 0))
1002
gen_set_Rc0(ctx, ret);
1004
/* 64-bit divide wrapper (TARGET_PPC64 only). */
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
1011
/* divwu divwu. divwuo divwuo. */
1012
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1013
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1014
/* divw divw. divwo divwo. */
1015
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1016
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1020
/* mulhw mulhw.: signed 32x32 multiply, high 32 bits to rD. */
static void gen_mulhw(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_muls2_i32(t0, t1, t0, t1);
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
1035
/* mulhwu mulhwu. */
1036
/* mulhwu mulhwu.: unsigned 32x32 multiply, high 32 bits to rD. */
static void gen_mulhwu(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mulu2_i32(t0, t1, t0, t1);
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
1052
/* mullw mullw.: low 32 bits of the product, sign-extended. */
static void gen_mullw(DisasContext *ctx)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
1061
/* mullwo mullwo. */
1062
/* mullwo mullwo.: 32-bit multiply with overflow detection.  OV is set
   when the high half is not the sign-extension of the low half. */
static void gen_mullwo(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_muls2_i32(t0, t1, t0, t1);
    tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);

    tcg_gen_sari_i32(t0, t0, 31);
    tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
    tcg_gen_extu_i32_tl(cpu_ov, t0);
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
1084
/* mulli: multiply by a sign-extended 16-bit immediate. */
static void gen_mulli(DisasContext *ctx)
{
    tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    SIMM(ctx->opcode));
}
1090
#if defined(TARGET_PPC64)
1092
/* mulhd mulhd.: signed 64x64 multiply, high 64 bits to rD. */
static void gen_mulhd(DisasContext *ctx)
{
    TCGv lo = tcg_temp_new();
    tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
1103
/* mulhdu mulhdu. */
1104
/* mulhdu mulhdu.: unsigned 64x64 multiply, high 64 bits to rD. */
static void gen_mulhdu(DisasContext *ctx)
{
    TCGv lo = tcg_temp_new();
    tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
1116
/* mulld mulld.: low 64 bits of the product. */
static void gen_mulld(DisasContext *ctx)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
1124
/* mulldo mulldo. */
1125
/* mulldo mulldo.: 64-bit multiply with OV, done in a C helper. */
static void gen_mulldo(DisasContext *ctx)
{
    gen_helper_mulldo(cpu_gpr[rD(ctx->opcode)], cpu_env,
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
1135
/* Common subf function */
1136
static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1137
TCGv arg2, bool add_ca, bool compute_ca,
1138
bool compute_ov, bool compute_rc0)
1142
if (compute_ca || compute_ov) {
1143
t0 = tcg_temp_new();
1147
/* dest = ~arg1 + arg2 [+ ca]. */
1148
if (NARROW_MODE(ctx)) {
1149
/* Caution: a non-obvious corner case of the spec is that we
1150
must produce the *entire* 64-bit addition, but produce the
1151
carry into bit 32. */
1152
TCGv inv1 = tcg_temp_new();
1153
TCGv t1 = tcg_temp_new();
1154
tcg_gen_not_tl(inv1, arg1);
1156
tcg_gen_add_tl(t0, arg2, cpu_ca);
1158
tcg_gen_addi_tl(t0, arg2, 1);
1160
tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */
1161
tcg_gen_add_tl(t0, t0, inv1);
1162
tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changes w/ carry */
1164
tcg_gen_shri_tl(cpu_ca, cpu_ca, 32); /* extract bit 32 */
1165
tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
1166
} else if (add_ca) {
1167
TCGv zero, inv1 = tcg_temp_new();
1168
tcg_gen_not_tl(inv1, arg1);
1169
zero = tcg_const_tl(0);
1170
tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
1171
tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
1172
tcg_temp_free(zero);
1173
tcg_temp_free(inv1);
1175
tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
1176
tcg_gen_sub_tl(t0, arg2, arg1);
1178
} else if (add_ca) {
1179
/* Since we're ignoring carry-out, we can simplify the
1180
standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. */
1181
tcg_gen_sub_tl(t0, arg2, arg1);
1182
tcg_gen_add_tl(t0, t0, cpu_ca);
1183
tcg_gen_subi_tl(t0, t0, 1);
1185
tcg_gen_sub_tl(t0, arg2, arg1);
1189
gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1191
if (unlikely(compute_rc0)) {
1192
gen_set_Rc0(ctx, t0);
1195
if (!TCGV_EQUAL(t0, ret)) {
1196
tcg_gen_mov_tl(ret, t0);
1200
/* Sub functions with Two operands functions */
1201
/* Subtract-from functions with two register operands. */
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
}
/* Subtract-from functions with one register operand and a constant. */
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
                                 add_ca, compute_ca, compute_ov)              \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], t0,                           \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
    tcg_temp_free(t0);                                                        \
}
1219
/* subf subf. subfo subfo. */
1220
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
1221
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
1222
/* subfc subfc. subfco subfco. */
1223
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
1224
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
1225
/* subfe subfe. subfeo subfo. */
1226
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
1227
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
1228
/* subfme subfme. subfmeo subfmeo. */
1229
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
1230
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
1231
/* subfze subfze. subfzeo subfzeo.*/
1232
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
1233
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
1236
/* subfic: rD = SIMM - rA, setting CA.  The call's continuation line
   and the temp free were dropped by the extraction; restored here. */
static void gen_subfic(DisasContext *ctx)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      c, 0, 1, 0, 0);
    tcg_temp_free(c);
}
1244
/* neg neg. nego nego. */
1245
/* Common body for neg/nego: rD = 0 - rA via the subf path. */
static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
{
    TCGv zero = tcg_const_tl(0);
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      zero, 0, 0, compute_ov, Rc(ctx->opcode));
    tcg_temp_free(zero);
}
1253
/* neg neg. */
static void gen_neg(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 0);
}
1258
/* nego nego. (overflow-recording form) */
static void gen_nego(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 1);
}
1263
/*** Integer logical ***/
1264
/* Two-operand logical op: rA = rS <op> rB, optional record form. */
#define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

/* One-operand logical op: rA = <op>(rS), optional record form. */
#define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}
1282
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
1284
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
1287
/* andi. (always record form) */
static void gen_andi_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    UIMM(ctx->opcode));
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
1294
/* andis. (immediate shifted, always record form) */
static void gen_andis_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    UIMM(ctx->opcode) << 16);
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
1301
/* cntlzw: count leading zeros of the low 32 bits of rS. */
static void gen_cntlzw(DisasContext *ctx)
{
    gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
1308
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
1309
/* extsb & extsb. */
1310
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
1311
/* extsh & extsh. */
1312
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
1314
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
1316
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
1319
/* or & or.
 * The degenerate encoding "or rX,rX,rX" is a no-op used on PPC64 as a
 * process-priority hint ("or 1,1,1" = low priority, etc.); in that case we
 * update SPR_PPR instead of emitting the OR. */
static void gen_or(DisasContext *ctx)
{
    int rs, ra, rb;

    rs = rS(ctx->opcode);
    ra = rA(ctx->opcode);
    rb = rB(ctx->opcode);
    /* Optimisation for mr. ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb)
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        else
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        if (unlikely(Rc(ctx->opcode) != 0))
            gen_set_Rc0(ctx, cpu_gpr[ra]);
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else {
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (ctx->mem_idx > 0) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (ctx->mem_idx > 0) {
                /* Set process priority to medium-hight */
                prio = 5;
            }
            break;
        case 3:
            if (ctx->mem_idx > 0) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->mem_idx > 1) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            /* nop */
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            /* PPR[11:13] holds the priority; clear then set it. */
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#endif
    }
}
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
1398
static void gen_xor(DisasContext *ctx)
1400
/* Optimisation for "set to zero" case */
1401
if (rS(ctx->opcode) != rB(ctx->opcode))
1402
tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1404
tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1405
if (unlikely(Rc(ctx->opcode) != 0))
1406
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1410
/* ori */
static void gen_ori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        /* XXX: should handle special NOPs for POWER series */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* oris — immediate shifted into the upper halfword. */
static void gen_oris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                   uimm << 16);
}

/* xori */
static void gen_xori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* xoris — immediate shifted into the upper halfword. */
static void gen_xoris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    uimm << 16);
}
/* popcntb : PowerPC 2.03 specification */
1459
static void gen_popcntb(DisasContext *ctx)
1461
gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1464
static void gen_popcntw(DisasContext *ctx)
1466
gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1469
#if defined(TARGET_PPC64)
1470
/* popcntd: PowerPC 2.06 specification */
1471
static void gen_popcntd(DisasContext *ctx)
1473
gen_helper_popcntd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1477
/* prtyw: PowerPC 2.05 specification */
1478
static void gen_prtyw(DisasContext *ctx)
1480
TCGv ra = cpu_gpr[rA(ctx->opcode)];
1481
TCGv rs = cpu_gpr[rS(ctx->opcode)];
1482
TCGv t0 = tcg_temp_new();
1483
tcg_gen_shri_tl(t0, rs, 16);
1484
tcg_gen_xor_tl(ra, rs, t0);
1485
tcg_gen_shri_tl(t0, ra, 8);
1486
tcg_gen_xor_tl(ra, ra, t0);
1487
tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
1491
#if defined(TARGET_PPC64)
1492
/* prtyd: PowerPC 2.05 specification */
1493
static void gen_prtyd(DisasContext *ctx)
1495
TCGv ra = cpu_gpr[rA(ctx->opcode)];
1496
TCGv rs = cpu_gpr[rS(ctx->opcode)];
1497
TCGv t0 = tcg_temp_new();
1498
tcg_gen_shri_tl(t0, rs, 32);
1499
tcg_gen_xor_tl(ra, rs, t0);
1500
tcg_gen_shri_tl(t0, ra, 16);
1501
tcg_gen_xor_tl(ra, ra, t0);
1502
tcg_gen_shri_tl(t0, ra, 8);
1503
tcg_gen_xor_tl(ra, ra, t0);
1504
tcg_gen_andi_tl(ra, ra, 1);
1509
#if defined(TARGET_PPC64)
/* extsw & extsw. */
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);

/* cntlzd — count leading zeros of the full 64-bit rS. */
static void gen_cntlzd(DisasContext *ctx)
{
    gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
#endif
/*** Integer rotate ***/
1524
/* rlwimi & rlwimi. */
1525
static void gen_rlwimi(DisasContext *ctx)
1527
uint32_t mb, me, sh;
1529
mb = MB(ctx->opcode);
1530
me = ME(ctx->opcode);
1531
sh = SH(ctx->opcode);
1532
if (likely(sh == 0 && mb == 0 && me == 31)) {
1533
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1537
TCGv t0 = tcg_temp_new();
1538
#if defined(TARGET_PPC64)
1539
TCGv_i32 t2 = tcg_temp_new_i32();
1540
tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
1541
tcg_gen_rotli_i32(t2, t2, sh);
1542
tcg_gen_extu_i32_i64(t0, t2);
1543
tcg_temp_free_i32(t2);
1545
tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1547
#if defined(TARGET_PPC64)
1551
mask = MASK(mb, me);
1552
t1 = tcg_temp_new();
1553
tcg_gen_andi_tl(t0, t0, mask);
1554
tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1555
tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1559
if (unlikely(Rc(ctx->opcode) != 0))
1560
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1563
/* rlwinm & rlwinm. */
1564
static void gen_rlwinm(DisasContext *ctx)
1566
uint32_t mb, me, sh;
1568
sh = SH(ctx->opcode);
1569
mb = MB(ctx->opcode);
1570
me = ME(ctx->opcode);
1572
if (likely(mb == 0 && me == (31 - sh))) {
1573
if (likely(sh == 0)) {
1574
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1576
TCGv t0 = tcg_temp_new();
1577
tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1578
tcg_gen_shli_tl(t0, t0, sh);
1579
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1582
} else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
1583
TCGv t0 = tcg_temp_new();
1584
tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1585
tcg_gen_shri_tl(t0, t0, mb);
1586
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1589
TCGv t0 = tcg_temp_new();
1590
#if defined(TARGET_PPC64)
1591
TCGv_i32 t1 = tcg_temp_new_i32();
1592
tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1593
tcg_gen_rotli_i32(t1, t1, sh);
1594
tcg_gen_extu_i32_i64(t0, t1);
1595
tcg_temp_free_i32(t1);
1597
tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1599
#if defined(TARGET_PPC64)
1603
tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1606
if (unlikely(Rc(ctx->opcode) != 0))
1607
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1610
/* rlwnm & rlwnm. */
1611
static void gen_rlwnm(DisasContext *ctx)
1615
#if defined(TARGET_PPC64)
1619
mb = MB(ctx->opcode);
1620
me = ME(ctx->opcode);
1621
t0 = tcg_temp_new();
1622
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
1623
#if defined(TARGET_PPC64)
1624
t1 = tcg_temp_new_i32();
1625
t2 = tcg_temp_new_i32();
1626
tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1627
tcg_gen_trunc_i64_i32(t2, t0);
1628
tcg_gen_rotl_i32(t1, t1, t2);
1629
tcg_gen_extu_i32_i64(t0, t1);
1630
tcg_temp_free_i32(t1);
1631
tcg_temp_free_i32(t2);
1633
tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
1635
if (unlikely(mb != 0 || me != 31)) {
1636
#if defined(TARGET_PPC64)
1640
tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1642
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1645
if (unlikely(Rc(ctx->opcode) != 0))
1646
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1649
#if defined(TARGET_PPC64)
/* Expand the 2 (resp. 4) sub-encodings of a 64-bit rotate insn whose
 * mask (and shift) fields have an extra high bit in the opcode. */
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0);                                                       \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1);                                                       \
}
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 1);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##2)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##3)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 1);                                                    \
}
/* Common body of the 64-bit rotate-immediate-and-mask instructions:
 * rA = rotl64(rS, sh) & MASK(mb, me); sldi/srdi encodings become shifts. */
static inline void gen_rldinm(DisasContext *ctx, uint32_t mb, uint32_t me,
                              uint32_t sh)
{
    if (likely(sh != 0 && mb == 0 && me == (63 - sh))) {
        tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
    } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
        tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
    } else {
        TCGv t0 = tcg_temp_new();
        tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
        if (likely(mb == 0 && me == 63)) {
            tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
        } else {
            tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
        }
        tcg_temp_free(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* rldicl - rldicl. */
1702
static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
1706
sh = SH(ctx->opcode) | (shn << 5);
1707
mb = MB(ctx->opcode) | (mbn << 5);
1708
gen_rldinm(ctx, mb, 63, sh);
1710
GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1711
/* rldicr - rldicr. */
1712
static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
1716
sh = SH(ctx->opcode) | (shn << 5);
1717
me = MB(ctx->opcode) | (men << 5);
1718
gen_rldinm(ctx, 0, me, sh);
1720
GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1721
/* rldic - rldic. */
1722
static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
1726
sh = SH(ctx->opcode) | (shn << 5);
1727
mb = MB(ctx->opcode) | (mbn << 5);
1728
gen_rldinm(ctx, mb, 63 - sh, sh);
1730
GEN_PPC64_R4(rldic, 0x1E, 0x04);
1732
static inline void gen_rldnm(DisasContext *ctx, uint32_t mb, uint32_t me)
1736
t0 = tcg_temp_new();
1737
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1738
tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1739
if (unlikely(mb != 0 || me != 63)) {
1740
tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1742
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1745
if (unlikely(Rc(ctx->opcode) != 0))
1746
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1749
/* rldcl - rldcl. */
1750
static inline void gen_rldcl(DisasContext *ctx, int mbn)
1754
mb = MB(ctx->opcode) | (mbn << 5);
1755
gen_rldnm(ctx, mb, 63);
1757
GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1758
/* rldcr - rldcr. */
1759
static inline void gen_rldcr(DisasContext *ctx, int men)
1763
me = MB(ctx->opcode) | (men << 5);
1764
gen_rldnm(ctx, 0, me);
1766
GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1767
/* rldimi - rldimi. */
1768
static inline void gen_rldimi(DisasContext *ctx, int mbn, int shn)
1770
uint32_t sh, mb, me;
1772
sh = SH(ctx->opcode) | (shn << 5);
1773
mb = MB(ctx->opcode) | (mbn << 5);
1775
if (unlikely(sh == 0 && mb == 0)) {
1776
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1781
t0 = tcg_temp_new();
1782
tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1783
t1 = tcg_temp_new();
1784
mask = MASK(mb, me);
1785
tcg_gen_andi_tl(t0, t0, mask);
1786
tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1787
tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1791
if (unlikely(Rc(ctx->opcode) != 0))
1792
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1794
GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1797
/*** Integer shift ***/
1800
static void gen_slw(DisasContext *ctx)
1804
t0 = tcg_temp_new();
1805
/* AND rS with a mask that is 0 when rB >= 0x20 */
1806
#if defined(TARGET_PPC64)
1807
tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1808
tcg_gen_sari_tl(t0, t0, 0x3f);
1810
tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1811
tcg_gen_sari_tl(t0, t0, 0x1f);
1813
tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1814
t1 = tcg_temp_new();
1815
tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1816
tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1819
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
1820
if (unlikely(Rc(ctx->opcode) != 0))
1821
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1825
/* sraw & sraw. — helper also computes XER[CA]. */
static void gen_sraw(DisasContext *ctx)
{
    gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srawi & srawi.
 * CA is set when the source is negative and any 1 bits are shifted out. */
static void gen_srawi(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        tcg_gen_mov_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
    } else {
        TCGv t0;
        tcg_gen_ext32s_tl(dst, src);
        tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        tcg_gen_sari_tl(dst, dst, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}
/* srw & srw. — logical shift right of the low word; counts >= 0x20 yield 0. */
static void gen_srw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_ext32u_tl(t0, t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
#if defined(TARGET_PPC64)
/* sld & sld. — 64-bit shift left; counts >= 0x40 yield 0. */
static void gen_sld(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srad & srad. — helper also computes XER[CA]. */
static void gen_srad(DisasContext *ctx)
{
    gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sradi & sradi.
 * n supplies the high bit of the 6-bit shift count; CA is set when the
 * source is negative and any 1 bits are shifted out. */
static inline void gen_sradi(DisasContext *ctx, int n)
{
    int sh = SH(ctx->opcode) + (n << 5);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        tcg_gen_mov_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
    } else {
        TCGv t0;
        tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        tcg_gen_sari_tl(dst, src, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}

static void gen_sradi0(DisasContext *ctx)
{
    gen_sradi(ctx, 0);
}

static void gen_sradi1(DisasContext *ctx)
{
    gen_sradi(ctx, 1);
}
static void gen_srd(DisasContext *ctx)
1950
t0 = tcg_temp_new();
1951
/* AND rS with a mask that is 0 when rB >= 0x40 */
1952
tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
1953
tcg_gen_sari_tl(t0, t0, 0x3f);
1954
tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1955
t1 = tcg_temp_new();
1956
tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
1957
tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1960
if (unlikely(Rc(ctx->opcode) != 0))
1961
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1965
/*** Floating-Point arithmetic ***/
/* 3-source FP op (frD = op(frA, frC, frB)); isfloat selects an extra
 * round-to-single after the operation (the single-precision form). */
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type)           \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env,                       \
                     cpu_fpr[rA(ctx->opcode)],                                \
                     cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);     \
    if (isfloat) {                                                            \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,                    \
                        cpu_fpr[rD(ctx->opcode)]);                            \
    }                                                                         \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], set_fprf,                      \
                     Rc(ctx->opcode) != 0);                                   \
}

#define GEN_FLOAT_ACB(name, op2, set_fprf, type)                              \
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type);                     \
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
/* 2-source FP op (frD = op(frA, frB)); see _GEN_FLOAT_ACB for the layout. */
#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type)     \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env,                       \
                     cpu_fpr[rA(ctx->opcode)],                                \
                     cpu_fpr[rB(ctx->opcode)]);                               \
    if (isfloat) {                                                            \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,                    \
                        cpu_fpr[rD(ctx->opcode)]);                            \
    }                                                                         \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)],                                \
                     set_fprf, Rc(ctx->opcode) != 0);                         \
}

#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type)                        \
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type);               \
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
/* 2-source FP op using frA and frC (frD = op(frA, frC)), e.g. fmul. */
#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type)     \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env,                       \
                     cpu_fpr[rA(ctx->opcode)],                                \
                     cpu_fpr[rC(ctx->opcode)]);                               \
    if (isfloat) {                                                            \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,                    \
                        cpu_fpr[rD(ctx->opcode)]);                            \
    }                                                                         \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)],                                \
                     set_fprf, Rc(ctx->opcode) != 0);                         \
}

#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type)                        \
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type);               \
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
/* 1-source FP op (frD = op(frB)), e.g. round/convert instructions. */
#define GEN_FLOAT_B(name, op2, op3, set_fprf, type)                           \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_env,                     \
                       cpu_fpr[rB(ctx->opcode)]);                             \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)],                                \
                     set_fprf, Rc(ctx->opcode) != 0);                         \
}
/* 1-source FP op with both single/double encodings sharing one helper
 * (e.g. fre/fres, frsqrte). */
#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type)                          \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_env,                     \
                       cpu_fpr[rB(ctx->opcode)]);                             \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)],                                \
                     set_fprf, Rc(ctx->opcode) != 0);                         \
}
/* fadd - fadds */
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
/* fdiv - fdivs */
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
/* fmul - fmuls */
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);

/* fre */
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);

/* fres */
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);

/* frsqrte */
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
static void gen_frsqrtes(DisasContext *ctx)
2090
if (unlikely(!ctx->fpu_enabled)) {
2091
gen_exception(ctx, POWERPC_EXCP_FPU);
2094
/* NIP cannot be restored if the memory exception comes from an helper */
2095
gen_update_nip(ctx, ctx->nip - 4);
2096
gen_reset_fpstatus();
2097
gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_env,
2098
cpu_fpr[rB(ctx->opcode)]);
2099
gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,
2100
cpu_fpr[rD(ctx->opcode)]);
2101
gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2105
_GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
2107
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
2111
/* fsqrt */
static void gen_fsqrt(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_env,
                     cpu_fpr[rB(ctx->opcode)]);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
}

/* fsqrts — fsqrt followed by a round to single precision. */
static void gen_fsqrts(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_env,
                     cpu_fpr[rB(ctx->opcode)]);
    gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,
                    cpu_fpr[rD(ctx->opcode)]);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
}
/*** Floating-Point multiply-and-add ***/
2142
/* fmadd - fmadds */
2143
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
2144
/* fmsub - fmsubs */
2145
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
2146
/* fnmadd - fnmadds */
2147
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
2148
/* fnmsub - fnmsubs */
2149
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
2151
/*** Floating-Point round & convert ***/
2153
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
2155
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
2157
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
2158
#if defined(TARGET_PPC64)
2160
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
2162
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
2164
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
2168
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
2170
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
2172
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
2174
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
2176
/*** Floating-Point compare ***/
2179
static void gen_fcmpo(DisasContext *ctx)
2182
if (unlikely(!ctx->fpu_enabled)) {
2183
gen_exception(ctx, POWERPC_EXCP_FPU);
2186
/* NIP cannot be restored if the memory exception comes from an helper */
2187
gen_update_nip(ctx, ctx->nip - 4);
2188
gen_reset_fpstatus();
2189
crf = tcg_const_i32(crfD(ctx->opcode));
2190
gen_helper_fcmpo(cpu_env, cpu_fpr[rA(ctx->opcode)],
2191
cpu_fpr[rB(ctx->opcode)], crf);
2192
tcg_temp_free_i32(crf);
2193
gen_helper_float_check_status(cpu_env);
2197
static void gen_fcmpu(DisasContext *ctx)
2200
if (unlikely(!ctx->fpu_enabled)) {
2201
gen_exception(ctx, POWERPC_EXCP_FPU);
2204
/* NIP cannot be restored if the memory exception comes from an helper */
2205
gen_update_nip(ctx, ctx->nip - 4);
2206
gen_reset_fpstatus();
2207
crf = tcg_const_i32(crfD(ctx->opcode));
2208
gen_helper_fcmpu(cpu_env, cpu_fpr[rA(ctx->opcode)],
2209
cpu_fpr[rB(ctx->opcode)], crf);
2210
tcg_temp_free_i32(crf);
2211
gen_helper_float_check_status(cpu_env);
2214
/*** Floating-point move ***/
2216
/* XXX: beware that fabs never checks for NaNs nor update FPSCR */
2217
static void gen_fabs(DisasContext *ctx)
2219
if (unlikely(!ctx->fpu_enabled)) {
2220
gen_exception(ctx, POWERPC_EXCP_FPU);
2223
tcg_gen_andi_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
2225
gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2229
/* XXX: beware that fmr never checks for NaNs nor update FPSCR */
2230
static void gen_fmr(DisasContext *ctx)
2232
if (unlikely(!ctx->fpu_enabled)) {
2233
gen_exception(ctx, POWERPC_EXCP_FPU);
2236
tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2237
gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2241
/* XXX: beware that fnabs never checks for NaNs nor update FPSCR */
2242
static void gen_fnabs(DisasContext *ctx)
2244
if (unlikely(!ctx->fpu_enabled)) {
2245
gen_exception(ctx, POWERPC_EXCP_FPU);
2248
tcg_gen_ori_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
2250
gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2254
/* XXX: beware that fneg never checks for NaNs nor update FPSCR */
2255
static void gen_fneg(DisasContext *ctx)
2257
if (unlikely(!ctx->fpu_enabled)) {
2258
gen_exception(ctx, POWERPC_EXCP_FPU);
2261
tcg_gen_xori_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
2263
gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2266
/* fcpsgn: PowerPC 2.05 specification */
2267
/* XXX: beware that fcpsgn never checks for NaNs nor update FPSCR */
2268
static void gen_fcpsgn(DisasContext *ctx)
2270
if (unlikely(!ctx->fpu_enabled)) {
2271
gen_exception(ctx, POWERPC_EXCP_FPU);
2274
tcg_gen_deposit_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],
2275
cpu_fpr[rB(ctx->opcode)], 0, 63);
2276
gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2279
/*** Floating-Point status & ctrl register ***/
2282
static void gen_mcrfs(DisasContext *ctx)
2284
TCGv tmp = tcg_temp_new();
2287
if (unlikely(!ctx->fpu_enabled)) {
2288
gen_exception(ctx, POWERPC_EXCP_FPU);
2291
bfa = 4 * (7 - crfS(ctx->opcode));
2292
tcg_gen_shri_tl(tmp, cpu_fpscr, bfa);
2293
tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], tmp);
2295
tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
2296
tcg_gen_andi_tl(cpu_fpscr, cpu_fpscr, ~(0xF << bfa));
2300
static void gen_mffs(DisasContext *ctx)
2302
if (unlikely(!ctx->fpu_enabled)) {
2303
gen_exception(ctx, POWERPC_EXCP_FPU);
2306
gen_reset_fpstatus();
2307
tcg_gen_extu_tl_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
2308
gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2312
/* mtfsb0 — clear one FPSCR bit (FEX/VX cannot be cleared directly). */
static void gen_mtfsb0(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
        TCGv_i32 t0;
        /* NIP cannot be restored if the memory exception comes from an helper */
        gen_update_nip(ctx, ctx->nip - 4);
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_clrbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
}

/* mtfsb1 — set one FPSCR bit. */
static void gen_mtfsb1(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    /* XXX: we pretend we can only do IEEE floating-point computations */
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
        TCGv_i32 t0;
        /* NIP cannot be restored if the memory exception comes from an helper */
        gen_update_nip(ctx, ctx->nip - 4);
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_setbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a differed exception */
    gen_helper_float_check_status(cpu_env);
}
/* mtfsf — move a masked set of fields from frB into FPSCR.
 * The W bit (upper-word select) is only valid on ISA 2.05+. */
static void gen_mtfsf(DisasContext *ctx)
{
    TCGv_i32 t0;
    int flm, l, w;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    flm = FPFLM(ctx->opcode);
    l = FPL(ctx->opcode);
    w = FPW(ctx->opcode);
    /* w is a single bit, so '&' matches '&&' here. */
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    if (l) {
        t0 = tcg_const_i32((ctx->insns_flags2 & PPC2_ISA205) ? 0xffff : 0xff);
    } else {
        t0 = tcg_const_i32(flm << (w * 8));
    }
    gen_helper_store_fpscr(cpu_env, cpu_fpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a differed exception */
    gen_helper_float_check_status(cpu_env);
}
/* mtfsfi — move a 4-bit immediate into one FPSCR field. */
static void gen_mtfsfi(DisasContext *ctx)
{
    int bf, sh, w;
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    w = FPW(ctx->opcode);
    bf = FPBF(ctx->opcode);
    /* w is a single bit, so '&' matches '&&' here. */
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    sh = (8 * w) + 7 - bf;
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    t0 = tcg_const_i64(((uint64_t)FPIMM(ctx->opcode)) << (4 * sh));
    t1 = tcg_const_i32(1 << sh);
    gen_helper_store_fpscr(cpu_env, t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a differed exception */
    gen_helper_float_check_status(cpu_env);
}
/*** Addressing modes ***/
2434
/* Register indirect with immediate index : EA = (rA|0) + SIMM */
2435
static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2438
target_long simm = SIMM(ctx->opcode);
2441
if (rA(ctx->opcode) == 0) {
2442
if (NARROW_MODE(ctx)) {
2443
simm = (uint32_t)simm;
2445
tcg_gen_movi_tl(EA, simm);
2446
} else if (likely(simm != 0)) {
2447
tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2448
if (NARROW_MODE(ctx)) {
2449
tcg_gen_ext32u_tl(EA, EA);
2452
if (NARROW_MODE(ctx)) {
2453
tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2455
tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2460
static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
2462
if (rA(ctx->opcode) == 0) {
2463
if (NARROW_MODE(ctx)) {
2464
tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2466
tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2469
tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2470
if (NARROW_MODE(ctx)) {
2471
tcg_gen_ext32u_tl(EA, EA);
2476
static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
2478
if (rA(ctx->opcode) == 0) {
2479
tcg_gen_movi_tl(EA, 0);
2480
} else if (NARROW_MODE(ctx)) {
2481
tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2483
tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2487
static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
2490
tcg_gen_addi_tl(ret, arg1, val);
2491
if (NARROW_MODE(ctx)) {
2492
tcg_gen_ext32u_tl(ret, ret);
2496
static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask)
2498
int l1 = gen_new_label();
2499
TCGv t0 = tcg_temp_new();
2501
/* NIP cannot be restored if the memory exception comes from an helper */
2502
gen_update_nip(ctx, ctx->nip - 4);
2503
tcg_gen_andi_tl(t0, EA, mask);
2504
tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2505
t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
2506
t2 = tcg_const_i32(0);
2507
gen_helper_raise_exception_err(cpu_env, t1, t2);
2508
tcg_temp_free_i32(t1);
2509
tcg_temp_free_i32(t2);
2514
/*** Integer load ***/
2515
static inline void gen_qemu_ld8u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2517
tcg_gen_qemu_ld8u(arg1, arg2, ctx->mem_idx);
2520
static inline void gen_qemu_ld8s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2522
tcg_gen_qemu_ld8s(arg1, arg2, ctx->mem_idx);
2525
static inline void gen_qemu_ld16u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2527
tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2528
if (unlikely(ctx->le_mode)) {
2529
tcg_gen_bswap16_tl(arg1, arg1);
2533
static inline void gen_qemu_ld16s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2535
if (unlikely(ctx->le_mode)) {
2536
tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2537
tcg_gen_bswap16_tl(arg1, arg1);
2538
tcg_gen_ext16s_tl(arg1, arg1);
2540
tcg_gen_qemu_ld16s(arg1, arg2, ctx->mem_idx);
2544
static inline void gen_qemu_ld32u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2546
tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2547
if (unlikely(ctx->le_mode)) {
2548
tcg_gen_bswap32_tl(arg1, arg1);
2552
static inline void gen_qemu_ld32s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2554
if (unlikely(ctx->le_mode)) {
2555
tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2556
tcg_gen_bswap32_tl(arg1, arg1);
2557
tcg_gen_ext32s_tl(arg1, arg1);
2559
tcg_gen_qemu_ld32s(arg1, arg2, ctx->mem_idx);
2562
static inline void gen_qemu_ld64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2564
tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
2565
if (unlikely(ctx->le_mode)) {
2566
tcg_gen_bswap64_i64(arg1, arg1);
2570
static inline void gen_qemu_st8(DisasContext *ctx, TCGv arg1, TCGv arg2)
2572
tcg_gen_qemu_st8(arg1, arg2, ctx->mem_idx);
2575
static inline void gen_qemu_st16(DisasContext *ctx, TCGv arg1, TCGv arg2)
2577
if (unlikely(ctx->le_mode)) {
2578
TCGv t0 = tcg_temp_new();
2579
tcg_gen_ext16u_tl(t0, arg1);
2580
tcg_gen_bswap16_tl(t0, t0);
2581
tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2584
tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2588
static inline void gen_qemu_st32(DisasContext *ctx, TCGv arg1, TCGv arg2)
2590
if (unlikely(ctx->le_mode)) {
2591
TCGv t0 = tcg_temp_new();
2592
tcg_gen_ext32u_tl(t0, arg1);
2593
tcg_gen_bswap32_tl(t0, t0);
2594
tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2597
tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2601
static inline void gen_qemu_st64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2603
if (unlikely(ctx->le_mode)) {
2604
TCGv_i64 t0 = tcg_temp_new_i64();
2605
tcg_gen_bswap64_i64(t0, arg1);
2606
tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
2607
tcg_temp_free_i64(t0);
2609
tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx);
2612
/* Templates for the integer load instruction family:
 *   GEN_LD   - D-form load (immediate displacement)
 *   GEN_LDU  - D-form load with update (rA := EA; rA==0 or rA==rD invalid)
 *   GEN_LDUX - X-form load with update
 *   GEN_LDX  - X-form load (register + register)
 *   GEN_LDS  - expands the full lX/lXu/lXux/lXx quartet */
#define GEN_LD(name, ldop, opc, type)                                         \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv EA;                                                                  \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_imm_index(ctx, EA, 0);                                           \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

#define GEN_LDU(name, ldop, opc, type)                                        \
static void glue(gen_, name##u)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(rA(ctx->opcode) == 0 ||                                      \
                 rA(ctx->opcode) == rD(ctx->opcode))) {                       \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    if (type == PPC_64B)                                                      \
        gen_addr_imm_index(ctx, EA, 0x03);                                    \
    else                                                                      \
        gen_addr_imm_index(ctx, EA, 0);                                       \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

#define GEN_LDUX(name, ldop, opc2, opc3, type)                                \
static void glue(gen_, name##ux)(DisasContext *ctx)                           \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(rA(ctx->opcode) == 0 ||                                      \
                 rA(ctx->opcode) == rD(ctx->opcode))) {                       \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

#define GEN_LDX_E(name, ldop, opc2, opc3, type, type2)                        \
static void glue(gen_, name##x)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

#define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
    GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE)

#define GEN_LDS(name, ldop, op, type)                                         \
GEN_LD(name, ldop, op | 0x20, type);                                          \
GEN_LDU(name, ldop, op | 0x21, type);                                         \
GEN_LDUX(name, ldop, 0x17, op | 0x01, type);                                  \
GEN_LDX(name, ldop, 0x17, op | 0x00, type)
2679
/* lbz lbzu lbzux lbzx */
2680
GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
2681
/* lha lhau lhaux lhax */
2682
GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
2683
/* lhz lhzu lhzux lhzx */
2684
GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
2685
/* lwz lwzu lwzux lwzx */
2686
GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
2687
#if defined(TARGET_PPC64)
2689
GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
2691
GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
2693
GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B);
2695
GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B);
2697
/* ld / ldu / lwa (DS-form): the low opcode bits select the variant and
 * the update form requires a valid, distinct rA. */
static void gen_ld(DisasContext *ctx)
{
    TCGv EA;
    if (Rc(ctx->opcode)) {
        /* Update form: rA must be non-zero and different from rD */
        if (unlikely(rA(ctx->opcode) == 0 ||
                     rA(ctx->opcode) == rD(ctx->opcode))) {
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
            return;
        }
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0x03);
    if (ctx->opcode & 0x02) {
        /* lwa (lwau is undefined) */
        gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
    } else {
        gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
    }
    if (Rc(ctx->opcode)) {
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
    }
    tcg_temp_free(EA);
}
2723
static void gen_lq(DisasContext *ctx)
2725
#if defined(CONFIG_USER_ONLY)
2726
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2731
/* Restore CPU state */
2732
if (unlikely(ctx->mem_idx == 0)) {
2733
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2736
ra = rA(ctx->opcode);
2737
rd = rD(ctx->opcode);
2738
if (unlikely((rd & 1) || rd == ra)) {
2739
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2742
if (unlikely(ctx->le_mode)) {
2743
/* Little-endian mode is not handled */
2744
gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2747
gen_set_access_type(ctx, ACCESS_INT);
2748
EA = tcg_temp_new();
2749
gen_addr_imm_index(ctx, EA, 0x0F);
2750
gen_qemu_ld64(ctx, cpu_gpr[rd], EA);
2751
gen_addr_add(ctx, EA, EA, 8);
2752
gen_qemu_ld64(ctx, cpu_gpr[rd+1], EA);
2758
/*** Integer store ***/
/* Store templates mirroring the load templates above; the update forms
 * only require rA != 0 (rA may equal rS for stores). */
#define GEN_ST(name, stop, opc, type)                                         \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv EA;                                                                  \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_imm_index(ctx, EA, 0);                                           \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

#define GEN_STU(name, stop, opc, type)                                        \
static void glue(gen_, stop##u)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    if (type == PPC_64B)                                                      \
        gen_addr_imm_index(ctx, EA, 0x03);                                    \
    else                                                                      \
        gen_addr_imm_index(ctx, EA, 0);                                       \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

#define GEN_STUX(name, stop, opc2, opc3, type)                                \
static void glue(gen_, name##ux)(DisasContext *ctx)                           \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

#define GEN_STX_E(name, stop, opc2, opc3, type, type2)                        \
static void glue(gen_, name##x)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

#define GEN_STX(name, stop, opc2, opc3, type)                                 \
    GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE)

#define GEN_STS(name, stop, op, type)                                         \
GEN_ST(name, stop, op | 0x20, type);                                          \
GEN_STU(name, stop, op | 0x21, type);                                         \
GEN_STUX(name, stop, 0x17, op | 0x01, type);                                  \
GEN_STX(name, stop, 0x17, op | 0x00, type)
2824
/* stb stbu stbux stbx */
2825
GEN_STS(stb, st8, 0x06, PPC_INTEGER);
2826
/* sth sthu sthux sthx */
2827
GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
2828
/* stw stwu stwux stwx */
2829
GEN_STS(stw, st32, 0x04, PPC_INTEGER);
2830
#if defined(TARGET_PPC64)
2831
GEN_STUX(std, st64, 0x15, 0x05, PPC_64B);
2832
GEN_STX(std, st64, 0x15, 0x04, PPC_64B);
2834
static void gen_std(DisasContext *ctx)
2839
rs = rS(ctx->opcode);
2840
if ((ctx->opcode & 0x3) == 0x2) {
2841
#if defined(CONFIG_USER_ONLY)
2842
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2845
if (unlikely(ctx->mem_idx == 0)) {
2846
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2849
if (unlikely(rs & 1)) {
2850
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2853
if (unlikely(ctx->le_mode)) {
2854
/* Little-endian mode is not handled */
2855
gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2858
gen_set_access_type(ctx, ACCESS_INT);
2859
EA = tcg_temp_new();
2860
gen_addr_imm_index(ctx, EA, 0x03);
2861
gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2862
gen_addr_add(ctx, EA, EA, 8);
2863
gen_qemu_st64(ctx, cpu_gpr[rs+1], EA);
2868
if (Rc(ctx->opcode)) {
2869
if (unlikely(rA(ctx->opcode) == 0)) {
2870
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2874
gen_set_access_type(ctx, ACCESS_INT);
2875
EA = tcg_temp_new();
2876
gen_addr_imm_index(ctx, EA, 0x03);
2877
gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2878
if (Rc(ctx->opcode))
2879
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2884
/*** Integer load and store with byte reverse ***/
2886
static inline void gen_qemu_ld16ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2888
tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2889
if (likely(!ctx->le_mode)) {
2890
tcg_gen_bswap16_tl(arg1, arg1);
2893
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
2896
static inline void gen_qemu_ld32ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2898
tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2899
if (likely(!ctx->le_mode)) {
2900
tcg_gen_bswap32_tl(arg1, arg1);
2903
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
2905
#if defined(TARGET_PPC64)
2907
static inline void gen_qemu_ld64ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2909
tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
2910
if (likely(!ctx->le_mode)) {
2911
tcg_gen_bswap64_tl(arg1, arg1);
2914
GEN_LDX_E(ldbr, ld64ur, 0x14, 0x10, PPC_NONE, PPC2_DBRX);
2915
#endif /* TARGET_PPC64 */
2918
static inline void gen_qemu_st16r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2920
if (likely(!ctx->le_mode)) {
2921
TCGv t0 = tcg_temp_new();
2922
tcg_gen_ext16u_tl(t0, arg1);
2923
tcg_gen_bswap16_tl(t0, t0);
2924
tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2927
tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2930
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
2933
static inline void gen_qemu_st32r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2935
if (likely(!ctx->le_mode)) {
2936
TCGv t0 = tcg_temp_new();
2937
tcg_gen_ext32u_tl(t0, arg1);
2938
tcg_gen_bswap32_tl(t0, t0);
2939
tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2942
tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2945
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
2947
#if defined(TARGET_PPC64)
2949
static inline void gen_qemu_st64r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2951
if (likely(!ctx->le_mode)) {
2952
TCGv t0 = tcg_temp_new();
2953
tcg_gen_bswap64_tl(t0, arg1);
2954
tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
2957
tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx);
2960
GEN_STX_E(stdbr, st64r, 0x14, 0x14, PPC_NONE, PPC2_DBRX);
2961
#endif /* TARGET_PPC64 */
2963
/*** Integer load and store multiple ***/
2966
static void gen_lmw(DisasContext *ctx)
2970
gen_set_access_type(ctx, ACCESS_INT);
2971
/* NIP cannot be restored if the memory exception comes from an helper */
2972
gen_update_nip(ctx, ctx->nip - 4);
2973
t0 = tcg_temp_new();
2974
t1 = tcg_const_i32(rD(ctx->opcode));
2975
gen_addr_imm_index(ctx, t0, 0);
2976
gen_helper_lmw(cpu_env, t0, t1);
2978
tcg_temp_free_i32(t1);
2982
static void gen_stmw(DisasContext *ctx)
2986
gen_set_access_type(ctx, ACCESS_INT);
2987
/* NIP cannot be restored if the memory exception comes from an helper */
2988
gen_update_nip(ctx, ctx->nip - 4);
2989
t0 = tcg_temp_new();
2990
t1 = tcg_const_i32(rS(ctx->opcode));
2991
gen_addr_imm_index(ctx, t0, 0);
2992
gen_helper_stmw(cpu_env, t0, t1);
2994
tcg_temp_free_i32(t1);
2997
/*** Integer load and store strings ***/
3000
/* PowerPC32 specification says we must generate an exception if
3001
* rA is in the range of registers to be loaded.
3002
* In an other hand, IBM says this is valid, but rA won't be loaded.
3003
* For now, I'll follow the spec...
3005
static void gen_lswi(DisasContext *ctx)
3009
int nb = NB(ctx->opcode);
3010
int start = rD(ctx->opcode);
3011
int ra = rA(ctx->opcode);
3017
if (unlikely(((start + nr) > 32 &&
3018
start <= ra && (start + nr - 32) > ra) ||
3019
((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
3020
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
3023
gen_set_access_type(ctx, ACCESS_INT);
3024
/* NIP cannot be restored if the memory exception comes from an helper */
3025
gen_update_nip(ctx, ctx->nip - 4);
3026
t0 = tcg_temp_new();
3027
gen_addr_register(ctx, t0);
3028
t1 = tcg_const_i32(nb);
3029
t2 = tcg_const_i32(start);
3030
gen_helper_lsw(cpu_env, t0, t1, t2);
3032
tcg_temp_free_i32(t1);
3033
tcg_temp_free_i32(t2);
3037
static void gen_lswx(DisasContext *ctx)
3040
TCGv_i32 t1, t2, t3;
3041
gen_set_access_type(ctx, ACCESS_INT);
3042
/* NIP cannot be restored if the memory exception comes from an helper */
3043
gen_update_nip(ctx, ctx->nip - 4);
3044
t0 = tcg_temp_new();
3045
gen_addr_reg_index(ctx, t0);
3046
t1 = tcg_const_i32(rD(ctx->opcode));
3047
t2 = tcg_const_i32(rA(ctx->opcode));
3048
t3 = tcg_const_i32(rB(ctx->opcode));
3049
gen_helper_lswx(cpu_env, t0, t1, t2, t3);
3051
tcg_temp_free_i32(t1);
3052
tcg_temp_free_i32(t2);
3053
tcg_temp_free_i32(t3);
3057
static void gen_stswi(DisasContext *ctx)
3061
int nb = NB(ctx->opcode);
3062
gen_set_access_type(ctx, ACCESS_INT);
3063
/* NIP cannot be restored if the memory exception comes from an helper */
3064
gen_update_nip(ctx, ctx->nip - 4);
3065
t0 = tcg_temp_new();
3066
gen_addr_register(ctx, t0);
3069
t1 = tcg_const_i32(nb);
3070
t2 = tcg_const_i32(rS(ctx->opcode));
3071
gen_helper_stsw(cpu_env, t0, t1, t2);
3073
tcg_temp_free_i32(t1);
3074
tcg_temp_free_i32(t2);
3078
static void gen_stswx(DisasContext *ctx)
3082
gen_set_access_type(ctx, ACCESS_INT);
3083
/* NIP cannot be restored if the memory exception comes from an helper */
3084
gen_update_nip(ctx, ctx->nip - 4);
3085
t0 = tcg_temp_new();
3086
gen_addr_reg_index(ctx, t0);
3087
t1 = tcg_temp_new_i32();
3088
tcg_gen_trunc_tl_i32(t1, cpu_xer);
3089
tcg_gen_andi_i32(t1, t1, 0x7F);
3090
t2 = tcg_const_i32(rS(ctx->opcode));
3091
gen_helper_stsw(cpu_env, t0, t1, t2);
3093
tcg_temp_free_i32(t1);
3094
tcg_temp_free_i32(t2);
3097
/*** Memory synchronisation ***/
3099
static void gen_eieio(DisasContext *ctx)
3104
static void gen_isync(DisasContext *ctx)
3106
gen_stop_exception(ctx);
3110
static void gen_lwarx(DisasContext *ctx)
3113
TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3114
gen_set_access_type(ctx, ACCESS_RES);
3115
t0 = tcg_temp_local_new();
3116
gen_addr_reg_index(ctx, t0);
3117
gen_check_align(ctx, t0, 0x03);
3118
gen_qemu_ld32u(ctx, gpr, t0);
3119
tcg_gen_mov_tl(cpu_reserve, t0);
3120
tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUPPCState, reserve_val));
3124
#if defined(CONFIG_USER_ONLY)
3125
static void gen_conditional_store (DisasContext *ctx, TCGv EA,
3128
TCGv t0 = tcg_temp_new();
3129
uint32_t save_exception = ctx->exception;
3131
tcg_gen_st_tl(EA, cpu_env, offsetof(CPUPPCState, reserve_ea));
3132
tcg_gen_movi_tl(t0, (size << 5) | reg);
3133
tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, reserve_info));
3135
gen_update_nip(ctx, ctx->nip-4);
3136
ctx->exception = POWERPC_EXCP_BRANCH;
3137
gen_exception(ctx, POWERPC_EXCP_STCX);
3138
ctx->exception = save_exception;
3143
static void gen_stwcx_(DisasContext *ctx)
3146
gen_set_access_type(ctx, ACCESS_RES);
3147
t0 = tcg_temp_local_new();
3148
gen_addr_reg_index(ctx, t0);
3149
gen_check_align(ctx, t0, 0x03);
3150
#if defined(CONFIG_USER_ONLY)
3151
gen_conditional_store(ctx, t0, rS(ctx->opcode), 4);
3156
tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3157
l1 = gen_new_label();
3158
tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3159
tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3160
gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3162
tcg_gen_movi_tl(cpu_reserve, -1);
3168
#if defined(TARGET_PPC64)
3170
static void gen_ldarx(DisasContext *ctx)
3173
TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3174
gen_set_access_type(ctx, ACCESS_RES);
3175
t0 = tcg_temp_local_new();
3176
gen_addr_reg_index(ctx, t0);
3177
gen_check_align(ctx, t0, 0x07);
3178
gen_qemu_ld64(ctx, gpr, t0);
3179
tcg_gen_mov_tl(cpu_reserve, t0);
3180
tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUPPCState, reserve_val));
3185
static void gen_stdcx_(DisasContext *ctx)
3188
gen_set_access_type(ctx, ACCESS_RES);
3189
t0 = tcg_temp_local_new();
3190
gen_addr_reg_index(ctx, t0);
3191
gen_check_align(ctx, t0, 0x07);
3192
#if defined(CONFIG_USER_ONLY)
3193
gen_conditional_store(ctx, t0, rS(ctx->opcode), 8);
3197
tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3198
l1 = gen_new_label();
3199
tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3200
tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3201
gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3203
tcg_gen_movi_tl(cpu_reserve, -1);
3208
#endif /* defined(TARGET_PPC64) */
3211
static void gen_sync(DisasContext *ctx)
3216
static void gen_wait(DisasContext *ctx)
3218
TCGv_i32 t0 = tcg_temp_new_i32();
3219
tcg_gen_st_i32(t0, cpu_env,
3220
-offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
3221
tcg_temp_free_i32(t0);
3222
/* Stop translation, as the CPU is supposed to sleep from now */
3223
gen_exception_err(ctx, EXCP_HLT, 1);
3226
/*** Floating-point load ***/
3227
#define GEN_LDF(name, ldop, opc, type) \
3228
static void glue(gen_, name)(DisasContext *ctx) \
3231
if (unlikely(!ctx->fpu_enabled)) { \
3232
gen_exception(ctx, POWERPC_EXCP_FPU); \
3235
gen_set_access_type(ctx, ACCESS_FLOAT); \
3236
EA = tcg_temp_new(); \
3237
gen_addr_imm_index(ctx, EA, 0); \
3238
gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3239
tcg_temp_free(EA); \
3242
#define GEN_LDUF(name, ldop, opc, type) \
3243
static void glue(gen_, name##u)(DisasContext *ctx) \
3246
if (unlikely(!ctx->fpu_enabled)) { \
3247
gen_exception(ctx, POWERPC_EXCP_FPU); \
3250
if (unlikely(rA(ctx->opcode) == 0)) { \
3251
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3254
gen_set_access_type(ctx, ACCESS_FLOAT); \
3255
EA = tcg_temp_new(); \
3256
gen_addr_imm_index(ctx, EA, 0); \
3257
gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3258
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3259
tcg_temp_free(EA); \
3262
#define GEN_LDUXF(name, ldop, opc, type) \
3263
static void glue(gen_, name##ux)(DisasContext *ctx) \
3266
if (unlikely(!ctx->fpu_enabled)) { \
3267
gen_exception(ctx, POWERPC_EXCP_FPU); \
3270
if (unlikely(rA(ctx->opcode) == 0)) { \
3271
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3274
gen_set_access_type(ctx, ACCESS_FLOAT); \
3275
EA = tcg_temp_new(); \
3276
gen_addr_reg_index(ctx, EA); \
3277
gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3278
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3279
tcg_temp_free(EA); \
3282
#define GEN_LDXF(name, ldop, opc2, opc3, type) \
3283
static void glue(gen_, name##x)(DisasContext *ctx) \
3286
if (unlikely(!ctx->fpu_enabled)) { \
3287
gen_exception(ctx, POWERPC_EXCP_FPU); \
3290
gen_set_access_type(ctx, ACCESS_FLOAT); \
3291
EA = tcg_temp_new(); \
3292
gen_addr_reg_index(ctx, EA); \
3293
gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3294
tcg_temp_free(EA); \
3297
#define GEN_LDFS(name, ldop, op, type) \
3298
GEN_LDF(name, ldop, op | 0x20, type); \
3299
GEN_LDUF(name, ldop, op | 0x21, type); \
3300
GEN_LDUXF(name, ldop, op | 0x01, type); \
3301
GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
3303
static inline void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3305
TCGv t0 = tcg_temp_new();
3306
TCGv_i32 t1 = tcg_temp_new_i32();
3307
gen_qemu_ld32u(ctx, t0, arg2);
3308
tcg_gen_trunc_tl_i32(t1, t0);
3310
gen_helper_float32_to_float64(arg1, cpu_env, t1);
3311
tcg_temp_free_i32(t1);
3314
/* lfd lfdu lfdux lfdx */
3315
GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT);
3316
/* lfs lfsu lfsux lfsx */
3317
GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);
3320
/* lfdp: FP pair load; register order is swapped in little-endian mode.
 * (Also drops the stray line-continuation backslash the original carried
 * after gen_addr_imm_index — harmless but misleading in a non-macro body.) */
static void gen_lfdp(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0);
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
    } else {
        gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
    }
    tcg_temp_free(EA);
}

/* lfdpx */
static void gen_lfdpx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
    } else {
        gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
    }
    tcg_temp_free(EA);
}

/* lfiwax: load a word, sign-extend it into the FPR */
static void gen_lfiwax(DisasContext *ctx)
{
    TCGv EA;
    TCGv t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32s(ctx, t0, EA);
    tcg_gen_ext_tl_i64(cpu_fpr[rD(ctx->opcode)], t0);
    tcg_temp_free(EA);
    tcg_temp_free(t0);
}
3384
/*** Floating-point store ***/
3385
#define GEN_STF(name, stop, opc, type) \
3386
static void glue(gen_, name)(DisasContext *ctx) \
3389
if (unlikely(!ctx->fpu_enabled)) { \
3390
gen_exception(ctx, POWERPC_EXCP_FPU); \
3393
gen_set_access_type(ctx, ACCESS_FLOAT); \
3394
EA = tcg_temp_new(); \
3395
gen_addr_imm_index(ctx, EA, 0); \
3396
gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3397
tcg_temp_free(EA); \
3400
#define GEN_STUF(name, stop, opc, type) \
3401
static void glue(gen_, name##u)(DisasContext *ctx) \
3404
if (unlikely(!ctx->fpu_enabled)) { \
3405
gen_exception(ctx, POWERPC_EXCP_FPU); \
3408
if (unlikely(rA(ctx->opcode) == 0)) { \
3409
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3412
gen_set_access_type(ctx, ACCESS_FLOAT); \
3413
EA = tcg_temp_new(); \
3414
gen_addr_imm_index(ctx, EA, 0); \
3415
gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3416
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3417
tcg_temp_free(EA); \
3420
#define GEN_STUXF(name, stop, opc, type) \
3421
static void glue(gen_, name##ux)(DisasContext *ctx) \
3424
if (unlikely(!ctx->fpu_enabled)) { \
3425
gen_exception(ctx, POWERPC_EXCP_FPU); \
3428
if (unlikely(rA(ctx->opcode) == 0)) { \
3429
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3432
gen_set_access_type(ctx, ACCESS_FLOAT); \
3433
EA = tcg_temp_new(); \
3434
gen_addr_reg_index(ctx, EA); \
3435
gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3436
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3437
tcg_temp_free(EA); \
3440
#define GEN_STXF(name, stop, opc2, opc3, type) \
3441
static void glue(gen_, name##x)(DisasContext *ctx) \
3444
if (unlikely(!ctx->fpu_enabled)) { \
3445
gen_exception(ctx, POWERPC_EXCP_FPU); \
3448
gen_set_access_type(ctx, ACCESS_FLOAT); \
3449
EA = tcg_temp_new(); \
3450
gen_addr_reg_index(ctx, EA); \
3451
gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3452
tcg_temp_free(EA); \
3455
#define GEN_STFS(name, stop, op, type) \
3456
GEN_STF(name, stop, op | 0x20, type); \
3457
GEN_STUF(name, stop, op | 0x21, type); \
3458
GEN_STUXF(name, stop, op | 0x01, type); \
3459
GEN_STXF(name, stop, 0x17, op | 0x00, type)
3461
static inline void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3463
TCGv_i32 t0 = tcg_temp_new_i32();
3464
TCGv t1 = tcg_temp_new();
3465
gen_helper_float64_to_float32(t0, cpu_env, arg1);
3466
tcg_gen_extu_i32_tl(t1, t0);
3467
tcg_temp_free_i32(t0);
3468
gen_qemu_st32(ctx, t1, arg2);
3472
/* stfd stfdu stfdux stfdx */
3473
GEN_STFS(stfd, st64, 0x16, PPC_FLOAT);
3474
/* stfs stfsu stfsux stfsx */
3475
GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);
3478
static void gen_stfdp(DisasContext *ctx)
3481
if (unlikely(!ctx->fpu_enabled)) {
3482
gen_exception(ctx, POWERPC_EXCP_FPU);
3485
gen_set_access_type(ctx, ACCESS_FLOAT);
3486
EA = tcg_temp_new();
3487
gen_addr_imm_index(ctx, EA, 0); \
3488
if (unlikely(ctx->le_mode)) {
3489
gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3490
tcg_gen_addi_tl(EA, EA, 8);
3491
gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3493
gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3494
tcg_gen_addi_tl(EA, EA, 8);
3495
gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3501
static void gen_stfdpx(DisasContext *ctx)
3504
if (unlikely(!ctx->fpu_enabled)) {
3505
gen_exception(ctx, POWERPC_EXCP_FPU);
3508
gen_set_access_type(ctx, ACCESS_FLOAT);
3509
EA = tcg_temp_new();
3510
gen_addr_reg_index(ctx, EA);
3511
if (unlikely(ctx->le_mode)) {
3512
gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3513
tcg_gen_addi_tl(EA, EA, 8);
3514
gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3516
gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3517
tcg_gen_addi_tl(EA, EA, 8);
3518
gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3524
static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3526
TCGv t0 = tcg_temp_new();
3527
tcg_gen_trunc_i64_tl(t0, arg1),
3528
gen_qemu_st32(ctx, t0, arg2);
3532
GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
3534
/* Record the Come-From Address Register when the CPU has one (64-bit only).
 * NOTE(review): the guard below assumes a ctx->has_cfar flag as in upstream
 * QEMU — confirm against the DisasContext definition earlier in this file. */
static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
{
#if defined(TARGET_PPC64)
    if (ctx->has_cfar) {
        tcg_gen_movi_tl(cpu_cfar, nip);
    }
#endif
}

/* Emit a (possibly chained) jump to dest; handles narrow-mode wrapping and
 * single-step/gdbstub trace exceptions. */
static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    TranslationBlock *tb;
    tb = ctx->tb;
    if (NARROW_MODE(ctx)) {
        dest = (uint32_t) dest;
    }
    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        likely(!ctx->singlestep_enabled)) {
        /* Direct TB chaining is only safe within the same guest page */
        tcg_gen_goto_tb(n);
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        tcg_gen_exit_tb((tcg_target_long)tb + n);
    } else {
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        if (unlikely(ctx->singlestep_enabled)) {
            if ((ctx->singlestep_enabled &
                 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
                (ctx->exception == POWERPC_EXCP_BRANCH ||
                 ctx->exception == POWERPC_EXCP_TRACE)) {
                target_ulong tmp = ctx->nip;
                ctx->nip = dest;
                gen_exception(ctx, POWERPC_EXCP_TRACE);
                ctx->nip = tmp;
            }
            if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
                gen_debug_exception(ctx);
            }
        }
        tcg_gen_exit_tb(0);
    }
}

/* LR := nip, truncated to 32 bits in narrow mode */
static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_lr, nip);
}

/* b ba bl bla */
static void gen_b(DisasContext *ctx)
{
    target_ulong li, target;

    ctx->exception = POWERPC_EXCP_BRANCH;
    /* sign extend LI */
    li = LI(ctx->opcode);
    li = (li ^ 0x02000000) - 0x02000000;
    if (likely(AA(ctx->opcode) == 0)) {
        /* Relative branch: nip has already advanced past this insn */
        target = ctx->nip + li - 4;
    } else {
        target = li;
    }
    if (LK(ctx->opcode)) {
        gen_setlr(ctx, ctx->nip);
    }
    gen_update_cfar(ctx, ctx->nip);
    gen_goto_tb(ctx, 0, target);
}
3608
/* Common conditional-branch generator for bc (BCOND_IM), bcctr (BCOND_CTR)
 * and bclr (BCOND_LR): BO bit 2 enables the CTR decrement/test, BO bit 4
 * enables the CR-bit test; l1 is the not-taken path. */
static inline void gen_bcond(DisasContext *ctx, int type)
{
    uint32_t bo = BO(ctx->opcode);
    int l1;
    TCGv target;

    ctx->exception = POWERPC_EXCP_BRANCH;
    if (type == BCOND_LR || type == BCOND_CTR) {
        /* Snapshot the target register before it may be clobbered below */
        target = tcg_temp_local_new();
        if (type == BCOND_CTR) {
            tcg_gen_mov_tl(target, cpu_ctr);
        } else {
            tcg_gen_mov_tl(target, cpu_lr);
        }
    } else {
        TCGV_UNUSED(target);
    }
    if (LK(ctx->opcode)) {
        gen_setlr(ctx, ctx->nip);
    }
    l1 = gen_new_label();
    if ((bo & 0x4) == 0) {
        /* Decrement and test CTR */
        TCGv temp = tcg_temp_new();
        if (unlikely(type == BCOND_CTR)) {
            /* bcctr with CTR-decrement is invalid */
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
            return;
        }
        tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(temp, cpu_ctr);
        } else {
            tcg_gen_mov_tl(temp, cpu_ctr);
        }
        if (bo & 0x2) {
            tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
        } else {
            tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
        }
        tcg_temp_free(temp);
    }
    if ((bo & 0x10) == 0) {
        /* Test CR */
        uint32_t bi = BI(ctx->opcode);
        uint32_t mask = 1 << (3 - (bi & 0x03));
        TCGv_i32 temp = tcg_temp_new_i32();

        if (bo & 0x8) {
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
            tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
        } else {
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
            tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
        }
        tcg_temp_free_i32(temp);
    }
    gen_update_cfar(ctx, ctx->nip);
    if (type == BCOND_IM) {
        target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
        if (likely(AA(ctx->opcode) == 0)) {
            gen_goto_tb(ctx, 0, ctx->nip + li - 4);
        } else {
            gen_goto_tb(ctx, 0, li);
        }
        gen_set_label(l1);
        gen_goto_tb(ctx, 1, ctx->nip);
    } else {
        /* Branch-to-register: mask the low bits per the ISA */
        if (NARROW_MODE(ctx)) {
            tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
        } else {
            tcg_gen_andi_tl(cpu_nip, target, ~3);
        }
        tcg_gen_exit_tb(0);
        gen_set_label(l1);
        gen_update_nip(ctx, ctx->nip);
        tcg_gen_exit_tb(0);
    }
    if (type == BCOND_LR || type == BCOND_CTR) {
        tcg_temp_free(target);
    }
}

static void gen_bc(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_IM);
}

static void gen_bcctr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_CTR);
}

static void gen_bclr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_LR);
}
3700
/*** Condition register logical ***/
3701
#define GEN_CRLOGIC(name, tcg_op, opc) \
3702
static void glue(gen_, name)(DisasContext *ctx) \
3707
sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3708
t0 = tcg_temp_new_i32(); \
3710
tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
3712
tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
3714
tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
3715
t1 = tcg_temp_new_i32(); \
3716
sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3718
tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
3720
tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
3722
tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
3723
tcg_op(t0, t0, t1); \
3724
bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3725
tcg_gen_andi_i32(t0, t0, bitmask); \
3726
tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3727
tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
3728
tcg_temp_free_i32(t0); \
3729
tcg_temp_free_i32(t1); \
3733
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3735
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3737
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3739
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3741
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3743
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3745
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3747
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
3750
static void gen_mcrf(DisasContext *ctx)
3752
tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3755
/*** System linkage ***/
3757
/* rfi (mem_idx only) */
3758
static void gen_rfi(DisasContext *ctx)
3760
#if defined(CONFIG_USER_ONLY)
3761
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3763
/* Restore CPU state */
3764
if (unlikely(!ctx->mem_idx)) {
3765
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3768
gen_update_cfar(ctx, ctx->nip);
3769
gen_helper_rfi(cpu_env);
3770
gen_sync_exception(ctx);
3774
#if defined(TARGET_PPC64)
3775
static void gen_rfid(DisasContext *ctx)
3777
#if defined(CONFIG_USER_ONLY)
3778
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3780
/* Restore CPU state */
3781
if (unlikely(!ctx->mem_idx)) {
3782
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3785
gen_update_cfar(ctx, ctx->nip);
3786
gen_helper_rfid(cpu_env);
3787
gen_sync_exception(ctx);
3791
static void gen_hrfid(DisasContext *ctx)
3793
#if defined(CONFIG_USER_ONLY)
3794
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3796
/* Restore CPU state */
3797
if (unlikely(ctx->mem_idx <= 1)) {
3798
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3801
gen_helper_hrfid(cpu_env);
3802
gen_sync_exception(ctx);
3808
#if defined(CONFIG_USER_ONLY)
3809
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3811
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3813
static void gen_sc(DisasContext *ctx)
3817
lev = (ctx->opcode >> 5) & 0x7F;
3818
gen_exception_err(ctx, POWERPC_SYSCALL, lev);
3824
/* tw */
static void gen_tw(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                  t0);
    tcg_temp_free_i32(t0);
}

/* twi */
static void gen_twi(DisasContext *ctx)
{
    TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
    TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

#if defined(TARGET_PPC64)
/* td */
static void gen_td(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                  t0);
    tcg_temp_free_i32(t0);
}

/* tdi */
static void gen_tdi(DisasContext *ctx)
{
    TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
    TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
#endif
3871
/*** Processor control ***/
3873
static void gen_read_xer(TCGv dst)
3875
TCGv t0 = tcg_temp_new();
3876
TCGv t1 = tcg_temp_new();
3877
TCGv t2 = tcg_temp_new();
3878
tcg_gen_mov_tl(dst, cpu_xer);
3879
tcg_gen_shli_tl(t0, cpu_so, XER_SO);
3880
tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
3881
tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
3882
tcg_gen_or_tl(t0, t0, t1);
3883
tcg_gen_or_tl(dst, dst, t2);
3884
tcg_gen_or_tl(dst, dst, t0);
3890
static void gen_write_xer(TCGv src)
3892
tcg_gen_andi_tl(cpu_xer, src,
3893
~((1u << XER_SO) | (1u << XER_OV) | (1u << XER_CA)));
3894
tcg_gen_shri_tl(cpu_so, src, XER_SO);
3895
tcg_gen_shri_tl(cpu_ov, src, XER_OV);
3896
tcg_gen_shri_tl(cpu_ca, src, XER_CA);
3897
tcg_gen_andi_tl(cpu_so, cpu_so, 1);
3898
tcg_gen_andi_tl(cpu_ov, cpu_ov, 1);
3899
tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
3903
/* mcrxr: copy XER[SO,OV,CA] into CR field crfD (bit 3 of the field is
 * SO, bit 2 OV, bit 1 CA, bit 0 zero), then clear the XER flags.
 * cpu_so/cpu_ov/cpu_ca each hold 0 or 1, so they must be shifted LEFT
 * into position; the original code shifted right, which always yielded
 * zero for SO and OV. */
static void gen_mcrxr(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(t0, cpu_so);
    tcg_gen_trunc_tl_i32(t1, cpu_ov);
    tcg_gen_trunc_tl_i32(dst, cpu_ca);
    tcg_gen_shli_i32(t0, t0, 3);
    tcg_gen_shli_i32(t1, t1, 2);
    tcg_gen_shli_i32(dst, dst, 1);
    tcg_gen_or_i32(dst, dst, t0);
    tcg_gen_or_i32(dst, dst, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);

    tcg_gen_movi_tl(cpu_so, 0);
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_movi_tl(cpu_ca, 0);
}
3925
/* mfcr / mfocrf: read the whole CR, or a single CR field when the
 * one-field (mfocrf) form bit 0x00100000 is set with a one-hot CRM. */
static void gen_mfcr(DisasContext *ctx)
{
    uint32_t crm, crn;

    if (likely(ctx->opcode & 0x00100000)) {
        crm = CRM(ctx->opcode);
        /* mfocrf is only defined for a one-hot field mask */
        if (likely(crm && ((crm & (crm - 1)) == 0))) {
            crn = ctz32(crm);
            tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
            tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
                            cpu_gpr[rD(ctx->opcode)], crn * 4);
        }
    } else {
        /* Concatenate all 8 CR fields, CR0 in the high nibble */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_mov_i32(t0, cpu_crf[0]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[1]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[2]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[3]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[4]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[5]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[6]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[7]);
        tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
        tcg_temp_free_i32(t0);
    }
}
3960
/* mfmsr: read MSR into rD, supervisor only */
static void gen_mfmsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
#endif
}
3973
/* Callback used in the SPR dispatch tables for registers that must not
 * be accessed at the current privilege level; gen_op_mfspr/gen_mtspr
 * compare against SPR_NOACCESS to raise the privilege exception.
 * NOTE(review): the debug body was wrapped in #if 0 upstream — restored
 * that way here; confirm against the project history. */
static void spr_noaccess(void *opaque, int gprn, int sprn)
{
#if 0
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: try to access SPR %d !\n", sprn);
#endif
}
#define SPR_NOACCESS (&spr_noaccess)
3983
/* Common mfspr/mftb path: pick the SPR read callback for the current
 * privilege level (hea = hypervisor, oea = supervisor, uea = user) and
 * invoke it, or raise the appropriate exception. */
static inline void gen_op_mfspr(DisasContext *ctx)
{
    void (*read_cb)(void *opaque, int gprn, int sprn);
    uint32_t sprn = SPR(ctx->opcode);

#if !defined(CONFIG_USER_ONLY)
    if (ctx->mem_idx == 2)
        read_cb = ctx->spr_cb[sprn].hea_read;
    else if (ctx->mem_idx)
        read_cb = ctx->spr_cb[sprn].oea_read;
    else
#endif
        read_cb = ctx->spr_cb[sprn].uea_read;
    if (likely(read_cb != NULL)) {
        if (likely(read_cb != SPR_NOACCESS)) {
            (*read_cb)(ctx, rD(ctx->opcode), sprn);
        } else {
            /* Privilege exception */
            /* This is a hack to avoid warnings when running Linux:
             * this OS breaks the PowerPC virtualisation model,
             * allowing userland application to read the PVR
             */
            if (sprn != SPR_PVR) {
                qemu_log("Trying to read privileged spr %d (0x%03x) at "
                         TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
                printf("Trying to read privileged spr %d (0x%03x) at "
                       TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
            }
            gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* Not defined */
        qemu_log("Trying to read invalid spr %d (0x%03x) at "
                 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
        printf("Trying to read invalid spr %d (0x%03x) at "
               TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
    }
}

/* mfspr */
static void gen_mfspr(DisasContext *ctx)
{
    gen_op_mfspr(ctx);
}

/* mftb: time base is mapped through the SPR table like any other SPR */
static void gen_mftb(DisasContext *ctx)
{
    gen_op_mfspr(ctx);
}
/* mtcrf / mtocrf: write CR fields selected by CRM from rS */
static void gen_mtcrf(DisasContext *ctx)
{
    uint32_t crm, crn;

    crm = CRM(ctx->opcode);
    if (likely((ctx->opcode & 0x00100000))) {
        /* mtocrf form: exactly one field, selected by a one-hot CRM */
        if (crm && ((crm & (crm - 1)) == 0)) {
            TCGv_i32 temp = tcg_temp_new_i32();
            crn = ctz32(crm);
            tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
            tcg_gen_shri_i32(temp, temp, crn * 4);
            tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
            tcg_temp_free_i32(temp);
        }
    } else {
        /* mtcrf form: every field whose CRM bit is set */
        TCGv_i32 temp = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
        for (crn = 0 ; crn < 8 ; crn++) {
            if (crm & (1 << crn)) {
                tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
                tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
            }
        }
        tcg_temp_free_i32(temp);
    }
}
4063
#if defined(TARGET_PPC64)
/* mtmsrd: write MSR (64-bit), supervisor only */
static void gen_mtmsrd(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation:
         * only MSR[RI] and MSR[EE] are updated */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        /* XXX: we need to update nip before the store
         *      if we enter power saving mode, we will exit the loop
         *      directly from ppc_store_msr
         */
        gen_update_nip(ctx, ctx->nip);
        gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
    }
#endif
}
#endif
/* mtmsr: write low 32 bits of MSR, supervisor only */
static void gen_mtmsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation:
         * only MSR[RI] and MSR[EE] are updated */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        TCGv msr = tcg_temp_new();

        /* XXX: we need to update nip before the store
         *      if we enter power saving mode, we will exit the loop
         *      directly from ppc_store_msr
         */
        gen_update_nip(ctx, ctx->nip);
#if defined(TARGET_PPC64)
        /* mtmsr only replaces the low 32 bits on a 64-bit CPU */
        tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
#else
        tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
#endif
        gen_helper_store_msr(cpu_env, msr);
        tcg_temp_free(msr);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
    }
#endif
}
4133
/* mtspr: pick the SPR write callback for the current privilege level
 * and invoke it, or raise the appropriate exception. */
static void gen_mtspr(DisasContext *ctx)
{
    void (*write_cb)(void *opaque, int sprn, int gprn);
    uint32_t sprn = SPR(ctx->opcode);

#if !defined(CONFIG_USER_ONLY)
    if (ctx->mem_idx == 2)
        write_cb = ctx->spr_cb[sprn].hea_write;
    else if (ctx->mem_idx)
        write_cb = ctx->spr_cb[sprn].oea_write;
    else
#endif
        write_cb = ctx->spr_cb[sprn].uea_write;
    if (likely(write_cb != NULL)) {
        if (likely(write_cb != SPR_NOACCESS)) {
            (*write_cb)(ctx, sprn, rS(ctx->opcode));
        } else {
            /* Privilege exception */
            qemu_log("Trying to write privileged spr %d (0x%03x) at "
                     TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
            printf("Trying to write privileged spr %d (0x%03x) at "
                   TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
            gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* Not defined */
        qemu_log("Trying to write invalid spr %d (0x%03x) at "
                 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
        printf("Trying to write invalid spr %d (0x%03x) at "
               TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
    }
}
4167
/*** Cache management ***/
4170
static void gen_dcbf(DisasContext *ctx)
4172
/* XXX: specification says this is treated as a load by the MMU */
4174
gen_set_access_type(ctx, ACCESS_CACHE);
4175
t0 = tcg_temp_new();
4176
gen_addr_reg_index(ctx, t0);
4177
gen_qemu_ld8u(ctx, t0, t0);
4181
/* dcbi (Supervisor only) */
4182
static void gen_dcbi(DisasContext *ctx)
4184
#if defined(CONFIG_USER_ONLY)
4185
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4188
if (unlikely(!ctx->mem_idx)) {
4189
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4192
EA = tcg_temp_new();
4193
gen_set_access_type(ctx, ACCESS_CACHE);
4194
gen_addr_reg_index(ctx, EA);
4195
val = tcg_temp_new();
4196
/* XXX: specification says this should be treated as a store by the MMU */
4197
gen_qemu_ld8u(ctx, val, EA);
4198
gen_qemu_st8(ctx, val, EA);
4205
static void gen_dcbst(DisasContext *ctx)
4207
/* XXX: specification say this is treated as a load by the MMU */
4209
gen_set_access_type(ctx, ACCESS_CACHE);
4210
t0 = tcg_temp_new();
4211
gen_addr_reg_index(ctx, t0);
4212
gen_qemu_ld8u(ctx, t0, t0);
4217
static void gen_dcbt(DisasContext *ctx)
4219
/* interpreted as no-op */
4220
/* XXX: specification say this is treated as a load by the MMU
4221
* but does not generate any exception
4226
static void gen_dcbtst(DisasContext *ctx)
4228
/* interpreted as no-op */
4229
/* XXX: specification say this is treated as a load by the MMU
4230
* but does not generate any exception
4235
/* dcbz / dcbzl: zero a data cache block via helper */
static void gen_dcbz(DisasContext *ctx)
{
    TCGv tcgv_addr;
    TCGv_i32 tcgv_is_dcbzl;
    int is_dcbzl = ctx->opcode & 0x00200000 ? 1 : 0;

    gen_set_access_type(ctx, ACCESS_CACHE);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    tcgv_addr = tcg_temp_new();
    tcgv_is_dcbzl = tcg_const_i32(is_dcbzl);

    gen_addr_reg_index(ctx, tcgv_addr);
    gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_is_dcbzl);

    tcg_temp_free(tcgv_addr);
    tcg_temp_free_i32(tcgv_is_dcbzl);
}

/* dst / dstt: data stream touch — rA == 0 is an invalid form */
static void gen_dst(DisasContext *ctx)
{
    if (rA(ctx->opcode) == 0) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
    } else {
        /* interpreted as no-op */
    }
}

/* dstst / dststt */
static void gen_dstst(DisasContext *ctx)
{
    if (rA(ctx->opcode) == 0) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
    } else {
        /* interpreted as no-op */
    }
}

/* dss / dssall */
static void gen_dss(DisasContext *ctx)
{
    /* interpreted as no-op */
}

/* icbi: instruction cache block invalidate via helper */
static void gen_icbi(DisasContext *ctx)
{
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_icbi(cpu_env, t0);
    tcg_temp_free(t0);
}

/* Optional: */
/* dcba */
static void gen_dcba(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a store by the MMU
     *      but does not generate any exception
     */
}
4304
/*** Segment register manipulation ***/
4305
/* Supervisor only: */
4308
static void gen_mfsr(DisasContext *ctx)
4310
#if defined(CONFIG_USER_ONLY)
4311
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4314
if (unlikely(!ctx->mem_idx)) {
4315
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4318
t0 = tcg_const_tl(SR(ctx->opcode));
4319
gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4325
static void gen_mfsrin(DisasContext *ctx)
4327
#if defined(CONFIG_USER_ONLY)
4328
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4331
if (unlikely(!ctx->mem_idx)) {
4332
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4335
t0 = tcg_temp_new();
4336
tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4337
tcg_gen_andi_tl(t0, t0, 0xF);
4338
gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4344
static void gen_mtsr(DisasContext *ctx)
4346
#if defined(CONFIG_USER_ONLY)
4347
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4350
if (unlikely(!ctx->mem_idx)) {
4351
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4354
t0 = tcg_const_tl(SR(ctx->opcode));
4355
gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
4361
static void gen_mtsrin(DisasContext *ctx)
4363
#if defined(CONFIG_USER_ONLY)
4364
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4367
if (unlikely(!ctx->mem_idx)) {
4368
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4371
t0 = tcg_temp_new();
4372
tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4373
tcg_gen_andi_tl(t0, t0, 0xF);
4374
gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
4379
#if defined(TARGET_PPC64)
/* Specific implementation for PowerPC 64 "bridge" emulation using SLB */

/* mfsr */
static void gen_mfsr_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;

    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif
}

/* mfsrin */
static void gen_mfsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;

    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif
}

/* mtsr */
static void gen_mtsr_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;

    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif
}

/* mtsrin */
static void gen_mtsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;

    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif
}

/* slbmte: write an SLB entry (rB = index/ESID, rS = VSID data) */
static void gen_slbmte(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
#endif
}

/* slbmfee: read SLB entry ESID half */
static void gen_slbmfee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
                             cpu_gpr[rB(ctx->opcode)]);
#endif
}

/* slbmfev: read SLB entry VSID half */
static void gen_slbmfev(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
                             cpu_gpr[rB(ctx->opcode)]);
#endif
}
#endif /* defined(TARGET_PPC64) */
4498
/*** Lookaside buffer management ***/
4499
/* Optional & mem_idx only: */
4502
static void gen_tlbia(DisasContext *ctx)
4504
#if defined(CONFIG_USER_ONLY)
4505
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4507
if (unlikely(!ctx->mem_idx)) {
4508
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4511
gen_helper_tlbia(cpu_env);
4516
static void gen_tlbiel(DisasContext *ctx)
4518
#if defined(CONFIG_USER_ONLY)
4519
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4521
if (unlikely(!ctx->mem_idx)) {
4522
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4525
gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4530
static void gen_tlbie(DisasContext *ctx)
4532
#if defined(CONFIG_USER_ONLY)
4533
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4535
if (unlikely(!ctx->mem_idx)) {
4536
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4539
if (NARROW_MODE(ctx)) {
4540
TCGv t0 = tcg_temp_new();
4541
tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
4542
gen_helper_tlbie(cpu_env, t0);
4545
gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4551
static void gen_tlbsync(DisasContext *ctx)
4553
#if defined(CONFIG_USER_ONLY)
4554
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4556
if (unlikely(!ctx->mem_idx)) {
4557
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4560
/* This has no effect: it should ensure that all previous
4561
* tlbie have completed
4563
gen_stop_exception(ctx);
4567
#if defined(TARGET_PPC64)
/* slbia: invalidate all SLB entries, supervisor only */
static void gen_slbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_slbia(cpu_env);
#endif
}

/* slbie: invalidate the SLB entry matching the ESID in rB */
static void gen_slbie(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif
}
#endif
/*** External control ***/
4601
static void gen_eciwx(DisasContext *ctx)
4604
/* Should check EAR[E] ! */
4605
gen_set_access_type(ctx, ACCESS_EXT);
4606
t0 = tcg_temp_new();
4607
gen_addr_reg_index(ctx, t0);
4608
gen_check_align(ctx, t0, 0x03);
4609
gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4614
static void gen_ecowx(DisasContext *ctx)
4617
/* Should check EAR[E] ! */
4618
gen_set_access_type(ctx, ACCESS_EXT);
4619
t0 = tcg_temp_new();
4620
gen_addr_reg_index(ctx, t0);
4621
gen_check_align(ctx, t0, 0x03);
4622
gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4626
/* PowerPC 601 specific instructions */
4629
static void gen_abs(DisasContext *ctx)
4631
int l1 = gen_new_label();
4632
int l2 = gen_new_label();
4633
tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
4634
tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4637
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4639
if (unlikely(Rc(ctx->opcode) != 0))
4640
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4644
static void gen_abso(DisasContext *ctx)
4646
int l1 = gen_new_label();
4647
int l2 = gen_new_label();
4648
int l3 = gen_new_label();
4649
/* Start with XER OV disabled, the most likely case */
4650
tcg_gen_movi_tl(cpu_ov, 0);
4651
tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
4652
tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
4653
tcg_gen_movi_tl(cpu_ov, 1);
4654
tcg_gen_movi_tl(cpu_so, 1);
4657
tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4660
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4662
if (unlikely(Rc(ctx->opcode) != 0))
4663
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4667
/* clcs: cache line compute size */
static void gen_clcs(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
    gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free_i32(t0);
    /* Rc=1 sets CR0 to an undefined state */
}

/* div - div. : 601 divide using MQ */
static void gen_div(DisasContext *ctx)
{
    gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* divo - divo. */
static void gen_divo(DisasContext *ctx)
{
    gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* divs - divs. */
static void gen_divs(DisasContext *ctx)
{
    gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* divso - divso. */
static void gen_divso(DisasContext *ctx)
{
    gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env,
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
4712
/* doz - doz. : rD = (rB > rA) ? rB - rA : 0 */
static void gen_doz(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
    tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* dozo - dozo. : doz with overflow detection on the subtract */
static void gen_dozo(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
    tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
    /* t1 sign bit set => operand signs differ and result sign flipped */
    tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
    tcg_gen_movi_tl(cpu_ov, 1);
    tcg_gen_movi_tl(cpu_so, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* dozi: rD = (simm > rA) ? simm - rA : 0 */
static void gen_dozi(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
    tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
4772
/* lscbx - lscbx. */
4773
static void gen_lscbx(DisasContext *ctx)
4775
TCGv t0 = tcg_temp_new();
4776
TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
4777
TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
4778
TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
4780
gen_addr_reg_index(ctx, t0);
4781
/* NIP cannot be restored if the memory exception comes from an helper */
4782
gen_update_nip(ctx, ctx->nip - 4);
4783
gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3);
4784
tcg_temp_free_i32(t1);
4785
tcg_temp_free_i32(t2);
4786
tcg_temp_free_i32(t3);
4787
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
4788
tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
4789
if (unlikely(Rc(ctx->opcode) != 0))
4790
gen_set_Rc0(ctx, t0);
4794
/* maskg - maskg. */
4795
static void gen_maskg(DisasContext *ctx)
4797
int l1 = gen_new_label();
4798
TCGv t0 = tcg_temp_new();
4799
TCGv t1 = tcg_temp_new();
4800
TCGv t2 = tcg_temp_new();
4801
TCGv t3 = tcg_temp_new();
4802
tcg_gen_movi_tl(t3, 0xFFFFFFFF);
4803
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4804
tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
4805
tcg_gen_addi_tl(t2, t0, 1);
4806
tcg_gen_shr_tl(t2, t3, t2);
4807
tcg_gen_shr_tl(t3, t3, t1);
4808
tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
4809
tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
4810
tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4816
if (unlikely(Rc(ctx->opcode) != 0))
4817
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4820
/* maskir - maskir. */
4821
static void gen_maskir(DisasContext *ctx)
4823
TCGv t0 = tcg_temp_new();
4824
TCGv t1 = tcg_temp_new();
4825
tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4826
tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4827
tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4830
if (unlikely(Rc(ctx->opcode) != 0))
4831
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4835
/* mul - mul. : 601 multiply, low half to MQ, high half to rD */
static void gen_mul(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv t2 = tcg_temp_new();
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_trunc_i64_tl(t2, t0);
    gen_store_spr(SPR_MQ, t2);
    tcg_gen_shri_i64(t1, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulo - mulo. : mul with overflow when the 64-bit product does not
 * fit in 32 signed bits */
static void gen_mulo(DisasContext *ctx)
{
    int l1 = gen_new_label();
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv t2 = tcg_temp_new();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_trunc_i64_tl(t2, t0);
    gen_store_spr(SPR_MQ, t2);
    tcg_gen_shri_i64(t1, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_gen_ext32s_i64(t1, t0);
    tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
    tcg_gen_movi_tl(cpu_ov, 1);
    tcg_gen_movi_tl(cpu_so, 1);
    gen_set_label(l1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
4883
/* nabs - nabs. : rD = -|rA| */
static void gen_nabs(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* nabso - nabso. */
static void gen_nabso(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    /* nabs never overflows */
    tcg_gen_movi_tl(cpu_ov, 0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
4915
/* rlmi - rlmi. : rotate left then insert under mask MB..ME */
static void gen_rlmi(DisasContext *ctx)
{
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_andi_tl(t0, t0, MASK(mb, me));
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* rrib - rrib. : rotate right and insert bit 0 of rS */
static void gen_rrib(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0x80000000);
    tcg_gen_shr_tl(t1, t1, t0);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_and_tl(t0, t0, t1);
    tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
4949
/* sle - sle. : shift left extended, rotated value kept in MQ */
static void gen_sle(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sleq - sleq. : shift left extended with MQ */
static void gen_sleq(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t2, 0xFFFFFFFF);
    tcg_gen_shl_tl(t2, t2, t0);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_and_tl(t0, t0, t2);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sliq - sliq. : shift left immediate with MQ */
static void gen_sliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* slliq - slliq. : shift left long immediate with MQ */
static void gen_slliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
    tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
5024
/* sllq - sllq. : shift left long with MQ; shift amount bit 0x20 selects
 * the MQ-only path */
static void gen_sllq(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    /* locals: values must survive the brcond */
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shl_tl(t1, t1, t2);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    gen_load_spr(t0, SPR_MQ);
    tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    gen_load_spr(t2, SPR_MQ);
    tcg_gen_andc_tl(t1, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* slq - slq. : shift left with MQ; result forced to 0 when shift >= 32 */
static void gen_slq(DisasContext *ctx)
{
    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    gen_store_spr(SPR_MQ, t1);
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
5075
/* sraiq - sraiq. */
5076
static void gen_sraiq(DisasContext *ctx)
5078
int sh = SH(ctx->opcode);
5079
int l1 = gen_new_label();
5080
TCGv t0 = tcg_temp_new();
5081
TCGv t1 = tcg_temp_new();
5082
tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5083
tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
5084
tcg_gen_or_tl(t0, t0, t1);
5085
gen_store_spr(SPR_MQ, t0);
5086
tcg_gen_movi_tl(cpu_ca, 0);
5087
tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
5088
tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
5089
tcg_gen_movi_tl(cpu_ca, 1);
5091
tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
5094
if (unlikely(Rc(ctx->opcode) != 0))
5095
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5099
/* sraq - sraq.: shift right algebraic by rB with MQ and CA update */
static void gen_sraq(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
    /* MQ = rotated source */
    tcg_gen_subfi_tl(t2, 32, t2);
    tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1);
    /* shift amount >= 32 (or bits shifted out): use sign propagation */
    tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
    /* CA = 1 iff result is negative and 1-bits were shifted out */
    tcg_gen_movi_tl(cpu_ca, 0);
    tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
    tcg_gen_movi_tl(cpu_ca, 1);
    gen_set_label(l2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sre - sre.: shift right extended; MQ receives the rotated source */
static void gen_sre(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srea - srea.: shift right extended algebraic; MQ = rotated source */
static void gen_srea(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sreq: shift right extended with MQ merge */
static void gen_sreq(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shr_tl(t1, t1, t0);
    tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    gen_load_spr(t2, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    /* rA = (rotated & mask) | (old MQ & ~mask) */
    tcg_gen_and_tl(t0, t0, t1);
    tcg_gen_andc_tl(t2, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sriq: shift right immediate with MQ */
static void gen_sriq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srliq: shift right long immediate with MQ merge */
static void gen_srliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    /* rA = (rotated & (-1 >> sh)) | (old MQ & ~(-1 >> sh)) */
    tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
    tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srlq: shift right long by rB, with MQ supplying bits */
static void gen_srlq(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shr_tl(t2, t1, t2);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    /* shift amount >= 32: result comes entirely from MQ */
    gen_load_spr(t0, SPR_MQ);
    tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* shift amount < 32: merge shifted rS with masked MQ bits */
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_and_tl(t0, t0, t2);
    gen_load_spr(t1, SPR_MQ);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srq: shift right with MQ, zeroing rA when rB bit 0x20 is set.
 * BUG FIX: the extracted code branched on t0 (the shifted value) instead
 * of t1 (the rB & 0x20 test), which would zero rA for every nonzero
 * result. The branch must test t1, matching the gen_slq pattern. */
static void gen_srq(DisasContext *ctx)
{
    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    /* MQ receives the full rotate: shr | shl(32 - n) */
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    gen_store_spr(SPR_MQ, t1);
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* PowerPC 602 specific instructions */
5277
static void gen_dsa(DisasContext *ctx)
5280
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5284
/* esa: not emulated, raises an invalid-instruction exception */
static void gen_esa(DisasContext *ctx)
{
    /* XXX: TODO */
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}
/* mfrom (602): privileged; raises PRIV_OPC in user mode */
static void gen_mfrom(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
#endif
}
/* 602 - 603 - G2 TLB management */
5307
static void gen_tlbld_6xx(DisasContext *ctx)
5309
#if defined(CONFIG_USER_ONLY)
5310
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5312
if (unlikely(!ctx->mem_idx)) {
5313
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5316
gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5321
/* tlbli: load instruction TLB entry (privileged) */
static void gen_tlbli_6xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif
}
/* 74xx TLB management */
5337
static void gen_tlbld_74xx(DisasContext *ctx)
5339
#if defined(CONFIG_USER_ONLY)
5340
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5342
if (unlikely(!ctx->mem_idx)) {
5343
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5346
gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5351
/* tlbli: load instruction TLB entry (privileged) */
static void gen_tlbli_74xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif
}
/* POWER instructions not in PowerPC 601 */
5367
static void gen_clf(DisasContext *ctx)
5369
/* Cache line flush: implemented as no-op */
5373
/* cli */
static void gen_cli(DisasContext *ctx)
{
    /* Cache line invalidate: privileged and treated as no-op */
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
#endif
}
/* dclst */
static void gen_dclst(DisasContext *ctx)
{
    /* Data cache line store: treated as no-op */
}
/* mfsri: read the segment register selected by the top 4 bits of the EA */
static void gen_mfsri(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0;

    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    tcg_gen_shri_tl(t0, t0, 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
    tcg_temp_free(t0);
    if (ra != 0 && ra != rd)
        tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
#endif
}
/* rac: real address compute (privileged) */
static void gen_rac(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    TCGv t0;

    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif
}
/* rfsvc: return from service call (privileged); ends the TB */
static void gen_rfsvc(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_rfsvc(cpu_env);
    gen_sync_exception(ctx);
#endif
}
/* svc is not implemented for now */
5448
/* POWER2 specific instructions */
5449
/* Quad manipulation (load/store two floats at a time) */
5452
static void gen_lfq(DisasContext *ctx)
5454
int rd = rD(ctx->opcode);
5456
gen_set_access_type(ctx, ACCESS_FLOAT);
5457
t0 = tcg_temp_new();
5458
gen_addr_imm_index(ctx, t0, 0);
5459
gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5460
gen_addr_add(ctx, t0, t0, 8);
5461
gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5466
/* lfqu: lfq with update of rA */
static void gen_lfqu(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
    if (ra != 0)
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/* lfqux: indexed lfq with update of rA */
static void gen_lfqux(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
    tcg_temp_free(t1);
    if (ra != 0)
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    tcg_temp_free(t0);
}
/* lfqx: indexed lfq */
static void gen_lfqx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
    tcg_temp_free(t0);
}
/* stfq */
static void gen_stfq(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_st64(ctx, cpu_fpr[rd], t0);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
    tcg_temp_free(t0);
}
/* stfqu: stfq with update of rA */
static void gen_stfqu(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_st64(ctx, cpu_fpr[rd], t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
    tcg_temp_free(t1);
    if (ra != 0)
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    tcg_temp_free(t0);
}
/* stfqux: indexed stfq with update of rA */
static void gen_stfqux(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_st64(ctx, cpu_fpr[rd], t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
    tcg_temp_free(t1);
    if (ra != 0)
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    tcg_temp_free(t0);
}
/* stfqx: indexed stfq */
static void gen_stfqx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_st64(ctx, cpu_fpr[rd], t0);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
    tcg_temp_free(t0);
}
/* BookE specific instructions */
5585
/* XXX: not implemented on 440 ? */
5586
static void gen_mfapidi(DisasContext *ctx)
5589
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5592
/* XXX: not implemented on 440 ? */
5593
static void gen_tlbiva(DisasContext *ctx)
5595
#if defined(CONFIG_USER_ONLY)
5596
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5599
if (unlikely(!ctx->mem_idx)) {
5600
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5603
t0 = tcg_temp_new();
5604
gen_addr_reg_index(ctx, t0);
5605
gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5610
/* All 405 MAC instructions are translated here */
5611
static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5612
int ra, int rb, int rt, int Rc)
5616
t0 = tcg_temp_local_new();
5617
t1 = tcg_temp_local_new();
5619
switch (opc3 & 0x0D) {
5621
/* macchw - macchw. - macchwo - macchwo. */
5622
/* macchws - macchws. - macchwso - macchwso. */
5623
/* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
5624
/* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
5625
/* mulchw - mulchw. */
5626
tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5627
tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5628
tcg_gen_ext16s_tl(t1, t1);
5631
/* macchwu - macchwu. - macchwuo - macchwuo. */
5632
/* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
5633
/* mulchwu - mulchwu. */
5634
tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5635
tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5636
tcg_gen_ext16u_tl(t1, t1);
5639
/* machhw - machhw. - machhwo - machhwo. */
5640
/* machhws - machhws. - machhwso - machhwso. */
5641
/* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
5642
/* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
5643
/* mulhhw - mulhhw. */
5644
tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5645
tcg_gen_ext16s_tl(t0, t0);
5646
tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5647
tcg_gen_ext16s_tl(t1, t1);
5650
/* machhwu - machhwu. - machhwuo - machhwuo. */
5651
/* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
5652
/* mulhhwu - mulhhwu. */
5653
tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5654
tcg_gen_ext16u_tl(t0, t0);
5655
tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5656
tcg_gen_ext16u_tl(t1, t1);
5659
/* maclhw - maclhw. - maclhwo - maclhwo. */
5660
/* maclhws - maclhws. - maclhwso - maclhwso. */
5661
/* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
5662
/* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
5663
/* mullhw - mullhw. */
5664
tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5665
tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5668
/* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
5669
/* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
5670
/* mullhwu - mullhwu. */
5671
tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5672
tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5676
/* (n)multiply-and-accumulate (0x0C / 0x0E) */
5677
tcg_gen_mul_tl(t1, t0, t1);
5679
/* nmultiply-and-accumulate (0x0E) */
5680
tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5682
/* multiply-and-accumulate (0x0C) */
5683
tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5687
/* Check overflow and/or saturate */
5688
int l1 = gen_new_label();
5691
/* Start with XER OV disabled, the most likely case */
5692
tcg_gen_movi_tl(cpu_ov, 0);
5696
tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5697
tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5698
tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5699
tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5702
tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5703
tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5707
tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5710
tcg_gen_movi_tl(t0, UINT32_MAX);
5714
/* Check overflow */
5715
tcg_gen_movi_tl(cpu_ov, 1);
5716
tcg_gen_movi_tl(cpu_so, 1);
5719
tcg_gen_mov_tl(cpu_gpr[rt], t0);
5722
tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5726
if (unlikely(Rc) != 0) {
5728
gen_set_Rc0(ctx, cpu_gpr[rt]);
5732
/* Stamp out one translator per 405 MAC instruction variant */
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
                         rD(ctx->opcode), Rc(ctx->opcode));                   \
}
/* macchw - macchw. */
5740
GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5741
/* macchwo - macchwo. */
5742
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5743
/* macchws - macchws. */
5744
GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5745
/* macchwso - macchwso. */
5746
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5747
/* macchwsu - macchwsu. */
5748
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5749
/* macchwsuo - macchwsuo. */
5750
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5751
/* macchwu - macchwu. */
5752
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5753
/* macchwuo - macchwuo. */
5754
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5755
/* machhw - machhw. */
5756
GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5757
/* machhwo - machhwo. */
5758
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5759
/* machhws - machhws. */
5760
GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5761
/* machhwso - machhwso. */
5762
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5763
/* machhwsu - machhwsu. */
5764
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5765
/* machhwsuo - machhwsuo. */
5766
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5767
/* machhwu - machhwu. */
5768
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5769
/* machhwuo - machhwuo. */
5770
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5771
/* maclhw - maclhw. */
5772
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5773
/* maclhwo - maclhwo. */
5774
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5775
/* maclhws - maclhws. */
5776
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5777
/* maclhwso - maclhwso. */
5778
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5779
/* maclhwu - maclhwu. */
5780
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5781
/* maclhwuo - maclhwuo. */
5782
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5783
/* maclhwsu - maclhwsu. */
5784
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5785
/* maclhwsuo - maclhwsuo. */
5786
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5787
/* nmacchw - nmacchw. */
5788
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5789
/* nmacchwo - nmacchwo. */
5790
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5791
/* nmacchws - nmacchws. */
5792
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5793
/* nmacchwso - nmacchwso. */
5794
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5795
/* nmachhw - nmachhw. */
5796
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5797
/* nmachhwo - nmachhwo. */
5798
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5799
/* nmachhws - nmachhws. */
5800
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5801
/* nmachhwso - nmachhwso. */
5802
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5803
/* nmaclhw - nmaclhw. */
5804
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5805
/* nmaclhwo - nmaclhwo. */
5806
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5807
/* nmaclhws - nmaclhws. */
5808
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5809
/* nmaclhwso - nmaclhwso. */
5810
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5812
/* mulchw - mulchw. */
5813
GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5814
/* mulchwu - mulchwu. */
5815
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5816
/* mulhhw - mulhhw. */
5817
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5818
/* mulhhwu - mulhhwu. */
5819
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5820
/* mullhw - mullhw. */
5821
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5822
/* mullhwu - mullhwu. */
5823
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5826
/* mfdcr: move from device control register (privileged) */
static void gen_mfdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv dcrn;

    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
    tcg_temp_free(dcrn);
#endif
}
/* mtdcr: move to device control register (privileged) */
static void gen_mtdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv dcrn;

    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(dcrn);
#endif
}
/* XXX: not implemented on 440 ? */
5865
static void gen_mfdcrx(DisasContext *ctx)
5867
#if defined(CONFIG_USER_ONLY)
5868
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5870
if (unlikely(!ctx->mem_idx)) {
5871
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5874
/* NIP cannot be restored if the memory exception comes from an helper */
5875
gen_update_nip(ctx, ctx->nip - 4);
5876
gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5877
cpu_gpr[rA(ctx->opcode)]);
5878
/* Note: Rc update flag set leads to undefined state of Rc0 */
5883
/* XXX: not implemented on 440 ? */
5884
static void gen_mtdcrx(DisasContext *ctx)
5886
#if defined(CONFIG_USER_ONLY)
5887
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5889
if (unlikely(!ctx->mem_idx)) {
5890
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5893
/* NIP cannot be restored if the memory exception comes from an helper */
5894
gen_update_nip(ctx, ctx->nip - 4);
5895
gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5896
cpu_gpr[rS(ctx->opcode)]);
5897
/* Note: Rc update flag set leads to undefined state of Rc0 */
5901
/* mfdcrux (PPC 460) : user-mode access to DCR */
5902
static void gen_mfdcrux(DisasContext *ctx)
5904
/* NIP cannot be restored if the memory exception comes from an helper */
5905
gen_update_nip(ctx, ctx->nip - 4);
5906
gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5907
cpu_gpr[rA(ctx->opcode)]);
5908
/* Note: Rc update flag set leads to undefined state of Rc0 */
5911
/* mtdcrux (PPC 460) : user-mode access to DCR */
5912
static void gen_mtdcrux(DisasContext *ctx)
5914
/* NIP cannot be restored if the memory exception comes from an helper */
5915
gen_update_nip(ctx, ctx->nip - 4);
5916
gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5917
cpu_gpr[rS(ctx->opcode)]);
5918
/* Note: Rc update flag set leads to undefined state of Rc0 */
5922
/* dccci: data cache congruence-class invalidate (privileged no-op) */
static void gen_dccci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* interpreted as no-op */
#endif
}
/* dcread: data cache read; rD receives the EA after a touch load */
static void gen_dcread(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    TCGv EA, val;

    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_set_access_type(ctx, ACCESS_CACHE);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    gen_qemu_ld32u(ctx, val, EA);
    tcg_temp_free(val);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
#endif
}
/* icbt (40x) */
static void gen_icbt_40x(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     *      but does not generate any exception
     */
}
/* iccci: instruction cache congruence-class invalidate (privileged no-op) */
static void gen_iccci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* interpreted as no-op */
#endif
}
/* icread: instruction cache read (privileged no-op) */
static void gen_icread(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* interpreted as no-op */
#endif
}
/* rfci (mem_idx only) */
5995
static void gen_rfci_40x(DisasContext *ctx)
5997
#if defined(CONFIG_USER_ONLY)
5998
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6000
if (unlikely(!ctx->mem_idx)) {
6001
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6004
/* Restore CPU state */
6005
gen_helper_40x_rfci(cpu_env);
6006
gen_sync_exception(ctx);
6010
/* rfci: return from critical interrupt (privileged); ends the TB */
static void gen_rfci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* Restore CPU state */
    gen_helper_rfci(cpu_env);
    gen_sync_exception(ctx);
#endif
}
/* BookE specific */
6027
/* XXX: not implemented on 440 ? */
6028
static void gen_rfdi(DisasContext *ctx)
6030
#if defined(CONFIG_USER_ONLY)
6031
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6033
if (unlikely(!ctx->mem_idx)) {
6034
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6037
/* Restore CPU state */
6038
gen_helper_rfdi(cpu_env);
6039
gen_sync_exception(ctx);
6043
/* XXX: not implemented on 440 ? */
6044
static void gen_rfmci(DisasContext *ctx)
6046
#if defined(CONFIG_USER_ONLY)
6047
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6049
if (unlikely(!ctx->mem_idx)) {
6050
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6053
/* Restore CPU state */
6054
gen_helper_rfmci(cpu_env);
6055
gen_sync_exception(ctx);
6059
/* TLB management - PowerPC 405 implementation */
6062
static void gen_tlbre_40x(DisasContext *ctx)
6064
#if defined(CONFIG_USER_ONLY)
6065
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6067
if (unlikely(!ctx->mem_idx)) {
6068
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6071
switch (rB(ctx->opcode)) {
6073
gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
6074
cpu_gpr[rA(ctx->opcode)]);
6077
gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
6078
cpu_gpr[rA(ctx->opcode)]);
6081
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6087
/* tlbsx - tlbsx. */
6088
static void gen_tlbsx_40x(DisasContext *ctx)
6090
#if defined(CONFIG_USER_ONLY)
6091
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6094
if (unlikely(!ctx->mem_idx)) {
6095
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6098
t0 = tcg_temp_new();
6099
gen_addr_reg_index(ctx, t0);
6100
gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6102
if (Rc(ctx->opcode)) {
6103
int l1 = gen_new_label();
6104
tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
6105
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6106
tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6113
/* tlbwe */
static void gen_tlbwe_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif
}
/* TLB management - PowerPC 440 implementation */
6141
static void gen_tlbre_440(DisasContext *ctx)
6143
#if defined(CONFIG_USER_ONLY)
6144
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6146
if (unlikely(!ctx->mem_idx)) {
6147
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6150
switch (rB(ctx->opcode)) {
6155
TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6156
gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
6157
t0, cpu_gpr[rA(ctx->opcode)]);
6158
tcg_temp_free_i32(t0);
6162
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6168
/* tlbsx - tlbsx. */
6169
static void gen_tlbsx_440(DisasContext *ctx)
6171
#if defined(CONFIG_USER_ONLY)
6172
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6175
if (unlikely(!ctx->mem_idx)) {
6176
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6179
t0 = tcg_temp_new();
6180
gen_addr_reg_index(ctx, t0);
6181
gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6183
if (Rc(ctx->opcode)) {
6184
int l1 = gen_new_label();
6185
tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
6186
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6187
tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6194
/* tlbwe */
static void gen_tlbwe_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
                                 cpu_gpr[rS(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif
}
/* TLB management - PowerPC BookE 2.06 implementation */
6224
static void gen_tlbre_booke206(DisasContext *ctx)
6226
#if defined(CONFIG_USER_ONLY)
6227
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6229
if (unlikely(!ctx->mem_idx)) {
6230
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6234
gen_helper_booke206_tlbre(cpu_env);
6238
/* tlbsx - tlbsx. */
6239
static void gen_tlbsx_booke206(DisasContext *ctx)
6241
#if defined(CONFIG_USER_ONLY)
6242
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6245
if (unlikely(!ctx->mem_idx)) {
6246
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6250
if (rA(ctx->opcode)) {
6251
t0 = tcg_temp_new();
6252
tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
6254
t0 = tcg_const_tl(0);
6257
tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
6258
gen_helper_booke206_tlbsx(cpu_env, t0);
6263
/* tlbwe */
static void gen_tlbwe_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* NIP cannot be restored if the helper raises a machine check */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_booke206_tlbwe(cpu_env);
#endif
}
/* tlbivax */
static void gen_tlbivax_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    TCGv t0;

    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_booke206_tlbivax(cpu_env, t0);
    tcg_temp_free(t0);
#endif
}
/* tlbilx: T field (opcode bits 21-22) selects the invalidation scope */
static void gen_tlbilx_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    TCGv t0;

    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);

    switch ((ctx->opcode >> 21) & 0x3) {
    case 0:
        gen_helper_booke206_tlbilx0(cpu_env, t0);
        break;
    case 1:
        gen_helper_booke206_tlbilx1(cpu_env, t0);
        break;
    case 3:
        gen_helper_booke206_tlbilx3(cpu_env, t0);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
    tcg_temp_free(t0);
#endif
}
/* wrtee: copy bit MSR_EE from rD into MSR (privileged) */
static void gen_wrtee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    TCGv t0;

    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
    tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
    tcg_temp_free(t0);
    /* Stop translation to have a chance to raise an exception
     * if we just set msr_ee to 1
     */
    gen_stop_exception(ctx);
#endif
}
/* wrteei: set or clear MSR_EE from the immediate bit (privileged) */
static void gen_wrteei(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    if (ctx->opcode & 0x00008000) {
        tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
        /* Stop translation to have a chance to raise an exception */
        gen_stop_exception(ctx);
    } else {
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    }
#endif
}
/* PowerPC 440 specific instructions */
6375
static void gen_dlmzb(DisasContext *ctx)
6377
TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6378
gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
6379
cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
6380
tcg_temp_free_i32(t0);
6383
/* mbar replaces eieio on 440 */
6384
static void gen_mbar(DisasContext *ctx)
6386
/* interpreted as no-op */
6389
/* msync replaces sync on 440 */
6390
static void gen_msync_4xx(DisasContext *ctx)
6392
/* interpreted as no-op */
6396
/* icbt (440 variant) */
static void gen_icbt_440(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     *      but does not generate any exception
     */
}
/* Embedded.Processor Control */
6406
/* msgclr: clear a pending processor doorbell message.  Privileged. */
static void gen_msgclr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(ctx->mem_idx == 0)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif
}
/* msgsnd: send a processor doorbell message.  Privileged. */
static void gen_msgsnd(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(ctx->mem_idx == 0)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
#endif
}
/*** Altivec vector extension ***/
6435
/* Altivec registers moves */
6437
static inline TCGv_ptr gen_avr_ptr(int reg)
6439
TCGv_ptr r = tcg_temp_new_ptr();
6440
tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, avr[reg]));
6444
/* Altivec 16-byte aligned load: two 64-bit halves, order depends on
 * little-endian mode (EA is forced to 16-byte alignment). */
#define GEN_VR_LDX(name, opc2, opc3)                                    \
static void glue(gen_, name)(DisasContext *ctx)                         \
{                                                                       \
    TCGv EA;                                                            \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        gen_exception(ctx, POWERPC_EXCP_VPU);                           \
        return;                                                         \
    }                                                                   \
    gen_set_access_type(ctx, ACCESS_INT);                               \
    EA = tcg_temp_new();                                                \
    gen_addr_reg_index(ctx, EA);                                        \
    tcg_gen_andi_tl(EA, EA, ~0xf);                                      \
    if (ctx->le_mode) {                                                 \
        gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA);              \
        tcg_gen_addi_tl(EA, EA, 8);                                     \
        gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA);              \
    } else {                                                            \
        gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA);              \
        tcg_gen_addi_tl(EA, EA, 8);                                     \
        gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA);              \
    }                                                                   \
    tcg_temp_free(EA);                                                  \
}
/* Altivec 16-byte aligned store, mirror of GEN_VR_LDX. */
#define GEN_VR_STX(name, opc2, opc3)                                    \
static void gen_st##name(DisasContext *ctx)                             \
{                                                                       \
    TCGv EA;                                                            \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        gen_exception(ctx, POWERPC_EXCP_VPU);                           \
        return;                                                         \
    }                                                                   \
    gen_set_access_type(ctx, ACCESS_INT);                               \
    EA = tcg_temp_new();                                                \
    gen_addr_reg_index(ctx, EA);                                        \
    tcg_gen_andi_tl(EA, EA, ~0xf);                                      \
    if (ctx->le_mode) {                                                 \
        gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA);              \
        tcg_gen_addi_tl(EA, EA, 8);                                     \
        gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA);              \
    } else {                                                            \
        gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA);              \
        tcg_gen_addi_tl(EA, EA, 8);                                     \
        gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA);              \
    }                                                                   \
    tcg_temp_free(EA);                                                  \
}
/* lvebx/lvehx/lvewx: element load done entirely in a helper. */
#define GEN_VR_LVE(name, opc2, opc3)                                    \
static void gen_lve##name(DisasContext *ctx)                            \
{                                                                       \
    TCGv EA;                                                            \
    TCGv_ptr rs;                                                        \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        gen_exception(ctx, POWERPC_EXCP_VPU);                           \
        return;                                                         \
    }                                                                   \
    gen_set_access_type(ctx, ACCESS_INT);                               \
    EA = tcg_temp_new();                                                \
    gen_addr_reg_index(ctx, EA);                                        \
    rs = gen_avr_ptr(rS(ctx->opcode));                                  \
    gen_helper_lve##name(cpu_env, rs, EA);                              \
    tcg_temp_free(EA);                                                  \
    tcg_temp_free_ptr(rs);                                              \
}
/* stvebx/stvehx/stvewx: element store done entirely in a helper. */
#define GEN_VR_STVE(name, opc2, opc3)                                   \
static void gen_stve##name(DisasContext *ctx)                           \
{                                                                       \
    TCGv EA;                                                            \
    TCGv_ptr rs;                                                        \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        gen_exception(ctx, POWERPC_EXCP_VPU);                           \
        return;                                                         \
    }                                                                   \
    gen_set_access_type(ctx, ACCESS_INT);                               \
    EA = tcg_temp_new();                                                \
    gen_addr_reg_index(ctx, EA);                                        \
    rs = gen_avr_ptr(rS(ctx->opcode));                                  \
    gen_helper_stve##name(cpu_env, rs, EA);                             \
    tcg_temp_free(EA);                                                  \
    tcg_temp_free_ptr(rs);                                              \
}
GEN_VR_LDX(lvx, 0x07, 0x03);
/* As we don't emulate the cache, lvxl is stricly equivalent to lvx */
GEN_VR_LDX(lvxl, 0x07, 0x0B);

GEN_VR_LVE(bx, 0x07, 0x00);
GEN_VR_LVE(hx, 0x07, 0x01);
GEN_VR_LVE(wx, 0x07, 0x02);

GEN_VR_STX(svx, 0x07, 0x07);
/* As we don't emulate the cache, stvxl is stricly equivalent to stvx */
GEN_VR_STX(svxl, 0x07, 0x0F);

GEN_VR_STVE(bx, 0x07, 0x04);
GEN_VR_STVE(hx, 0x07, 0x05);
GEN_VR_STVE(wx, 0x07, 0x06);
/* lvsl: load vector for shift left (permute control vector). */
static void gen_lvsl(DisasContext *ctx)
{
    TCGv_ptr rd;
    TCGv EA;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_lvsl(rd, EA);
    tcg_temp_free(EA);
    tcg_temp_free_ptr(rd);
}
/* lvsr: load vector for shift right (permute control vector). */
static void gen_lvsr(DisasContext *ctx)
{
    TCGv_ptr rd;
    TCGv EA;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_lvsr(rd, EA);
    tcg_temp_free(EA);
    tcg_temp_free_ptr(rd);
}
/* mfvscr: VSCR lives in the low 32 bits of vD; upper half is zeroed. */
static void gen_mfvscr(DisasContext *ctx)
{
    TCGv_i32 t;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    tcg_gen_movi_i64(cpu_avrh[rD(ctx->opcode)], 0);
    t = tcg_temp_new_i32();
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, vscr));
    tcg_gen_extu_i32_i64(cpu_avrl[rD(ctx->opcode)], t);
    tcg_temp_free_i32(t);
}
/* mtvscr: move to VSCR via helper (handles NJ/SAT side effects). */
static void gen_mtvscr(DisasContext *ctx)
{
    TCGv_ptr p;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    p = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_mtvscr(cpu_env, p);
    tcg_temp_free_ptr(p);
}
/* Logical operations */
6603
/* Bitwise ops are applied independently to the two 64-bit AVR halves. */
#define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3)                        \
static void glue(gen_, name)(DisasContext *ctx)                         \
{                                                                       \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        gen_exception(ctx, POWERPC_EXCP_VPU);                           \
        return;                                                         \
    }                                                                   \
    tcg_op(cpu_avrh[rD(ctx->opcode)], cpu_avrh[rA(ctx->opcode)], cpu_avrh[rB(ctx->opcode)]); \
    tcg_op(cpu_avrl[rD(ctx->opcode)], cpu_avrl[rA(ctx->opcode)], cpu_avrl[rB(ctx->opcode)]); \
}

GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16);
GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17);
GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18);
GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19);
GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20);
/* VX-form handler calling a helper on (vD, vA, vB); the _ENV variant
 * additionally passes cpu_env for helpers that touch VSCR/saturation. */
#define GEN_VXFORM(name, opc2, opc3)                                    \
static void glue(gen_, name)(DisasContext *ctx)                         \
{                                                                       \
    TCGv_ptr ra, rb, rd;                                                \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        gen_exception(ctx, POWERPC_EXCP_VPU);                           \
        return;                                                         \
    }                                                                   \
    ra = gen_avr_ptr(rA(ctx->opcode));                                  \
    rb = gen_avr_ptr(rB(ctx->opcode));                                  \
    rd = gen_avr_ptr(rD(ctx->opcode));                                  \
    gen_helper_##name (rd, ra, rb);                                     \
    tcg_temp_free_ptr(ra);                                              \
    tcg_temp_free_ptr(rb);                                              \
    tcg_temp_free_ptr(rd);                                              \
}

#define GEN_VXFORM_ENV(name, opc2, opc3)                                \
static void glue(gen_, name)(DisasContext *ctx)                         \
{                                                                       \
    TCGv_ptr ra, rb, rd;                                                \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        gen_exception(ctx, POWERPC_EXCP_VPU);                           \
        return;                                                         \
    }                                                                   \
    ra = gen_avr_ptr(rA(ctx->opcode));                                  \
    rb = gen_avr_ptr(rB(ctx->opcode));                                  \
    rd = gen_avr_ptr(rD(ctx->opcode));                                  \
    gen_helper_##name(cpu_env, rd, ra, rb);                             \
    tcg_temp_free_ptr(ra);                                              \
    tcg_temp_free_ptr(rb);                                              \
    tcg_temp_free_ptr(rd);                                              \
}
GEN_VXFORM(vaddubm, 0, 0);
GEN_VXFORM(vadduhm, 0, 1);
GEN_VXFORM(vadduwm, 0, 2);
GEN_VXFORM(vsububm, 0, 16);
GEN_VXFORM(vsubuhm, 0, 17);
GEN_VXFORM(vsubuwm, 0, 18);
GEN_VXFORM(vmaxub, 1, 0);
GEN_VXFORM(vmaxuh, 1, 1);
GEN_VXFORM(vmaxuw, 1, 2);
GEN_VXFORM(vmaxsb, 1, 4);
GEN_VXFORM(vmaxsh, 1, 5);
GEN_VXFORM(vmaxsw, 1, 6);
GEN_VXFORM(vminub, 1, 8);
GEN_VXFORM(vminuh, 1, 9);
GEN_VXFORM(vminuw, 1, 10);
GEN_VXFORM(vminsb, 1, 12);
GEN_VXFORM(vminsh, 1, 13);
GEN_VXFORM(vminsw, 1, 14);
GEN_VXFORM(vavgub, 1, 16);
GEN_VXFORM(vavguh, 1, 17);
GEN_VXFORM(vavguw, 1, 18);
GEN_VXFORM(vavgsb, 1, 20);
GEN_VXFORM(vavgsh, 1, 21);
GEN_VXFORM(vavgsw, 1, 22);
GEN_VXFORM(vmrghb, 6, 0);
GEN_VXFORM(vmrghh, 6, 1);
GEN_VXFORM(vmrghw, 6, 2);
GEN_VXFORM(vmrglb, 6, 4);
GEN_VXFORM(vmrglh, 6, 5);
GEN_VXFORM(vmrglw, 6, 6);
GEN_VXFORM(vmuloub, 4, 0);
GEN_VXFORM(vmulouh, 4, 1);
GEN_VXFORM(vmulosb, 4, 4);
GEN_VXFORM(vmulosh, 4, 5);
GEN_VXFORM(vmuleub, 4, 8);
GEN_VXFORM(vmuleuh, 4, 9);
GEN_VXFORM(vmulesb, 4, 12);
GEN_VXFORM(vmulesh, 4, 13);
GEN_VXFORM(vslb, 2, 4);
GEN_VXFORM(vslh, 2, 5);
GEN_VXFORM(vslw, 2, 6);
GEN_VXFORM(vsrb, 2, 8);
GEN_VXFORM(vsrh, 2, 9);
GEN_VXFORM(vsrw, 2, 10);
GEN_VXFORM(vsrab, 2, 12);
GEN_VXFORM(vsrah, 2, 13);
GEN_VXFORM(vsraw, 2, 14);
GEN_VXFORM(vslo, 6, 16);
GEN_VXFORM(vsro, 6, 17);
GEN_VXFORM(vaddcuw, 0, 6);
GEN_VXFORM(vsubcuw, 0, 22);
GEN_VXFORM_ENV(vaddubs, 0, 8);
GEN_VXFORM_ENV(vadduhs, 0, 9);
GEN_VXFORM_ENV(vadduws, 0, 10);
GEN_VXFORM_ENV(vaddsbs, 0, 12);
GEN_VXFORM_ENV(vaddshs, 0, 13);
GEN_VXFORM_ENV(vaddsws, 0, 14);
GEN_VXFORM_ENV(vsububs, 0, 24);
GEN_VXFORM_ENV(vsubuhs, 0, 25);
GEN_VXFORM_ENV(vsubuws, 0, 26);
GEN_VXFORM_ENV(vsubsbs, 0, 28);
GEN_VXFORM_ENV(vsubshs, 0, 29);
GEN_VXFORM_ENV(vsubsws, 0, 30);
GEN_VXFORM(vrlb, 2, 0);
GEN_VXFORM(vrlh, 2, 1);
GEN_VXFORM(vrlw, 2, 2);
GEN_VXFORM(vsl, 2, 7);
GEN_VXFORM(vsr, 2, 11);
GEN_VXFORM_ENV(vpkuhum, 7, 0);
GEN_VXFORM_ENV(vpkuwum, 7, 1);
GEN_VXFORM_ENV(vpkuhus, 7, 2);
GEN_VXFORM_ENV(vpkuwus, 7, 3);
GEN_VXFORM_ENV(vpkshus, 7, 4);
GEN_VXFORM_ENV(vpkswus, 7, 5);
GEN_VXFORM_ENV(vpkshss, 7, 6);
GEN_VXFORM_ENV(vpkswss, 7, 7);
GEN_VXFORM(vpkpx, 7, 12);
GEN_VXFORM_ENV(vsum4ubs, 4, 24);
GEN_VXFORM_ENV(vsum4sbs, 4, 28);
GEN_VXFORM_ENV(vsum4shs, 4, 25);
GEN_VXFORM_ENV(vsum2sws, 4, 26);
GEN_VXFORM_ENV(vsumsws, 4, 30);
GEN_VXFORM_ENV(vaddfp, 5, 0);
GEN_VXFORM_ENV(vsubfp, 5, 1);
GEN_VXFORM_ENV(vmaxfp, 5, 16);
GEN_VXFORM_ENV(vminfp, 5, 17);
/* VXR-form (vector compare): GEN_VXRFORM emits both the plain handler
 * and the record ("dot") handler, which shares the same helper. */
#define GEN_VXRFORM1(opname, name, str, opc2, opc3)                     \
static void glue(gen_, name)(DisasContext *ctx)                         \
{                                                                       \
    TCGv_ptr ra, rb, rd;                                                \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        gen_exception(ctx, POWERPC_EXCP_VPU);                           \
        return;                                                         \
    }                                                                   \
    ra = gen_avr_ptr(rA(ctx->opcode));                                  \
    rb = gen_avr_ptr(rB(ctx->opcode));                                  \
    rd = gen_avr_ptr(rD(ctx->opcode));                                  \
    gen_helper_##opname(cpu_env, rd, ra, rb);                           \
    tcg_temp_free_ptr(ra);                                              \
    tcg_temp_free_ptr(rb);                                              \
    tcg_temp_free_ptr(rd);                                              \
}

#define GEN_VXRFORM(name, opc2, opc3)                                   \
    GEN_VXRFORM1(name, name, #name, opc2, opc3)                         \
    GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))

GEN_VXRFORM(vcmpequb, 3, 0)
GEN_VXRFORM(vcmpequh, 3, 1)
GEN_VXRFORM(vcmpequw, 3, 2)
GEN_VXRFORM(vcmpgtsb, 3, 12)
GEN_VXRFORM(vcmpgtsh, 3, 13)
GEN_VXRFORM(vcmpgtsw, 3, 14)
GEN_VXRFORM(vcmpgtub, 3, 8)
GEN_VXRFORM(vcmpgtuh, 3, 9)
GEN_VXRFORM(vcmpgtuw, 3, 10)
GEN_VXRFORM(vcmpeqfp, 3, 3)
GEN_VXRFORM(vcmpgefp, 3, 7)
GEN_VXRFORM(vcmpgtfp, 3, 11)
GEN_VXRFORM(vcmpbfp, 3, 15)
/* VX-form with a 5-bit signed immediate (splat immediate family). */
#define GEN_VXFORM_SIMM(name, opc2, opc3)                               \
static void glue(gen_, name)(DisasContext *ctx)                         \
{                                                                       \
    TCGv_ptr rd;                                                        \
    TCGv_i32 simm;                                                      \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        gen_exception(ctx, POWERPC_EXCP_VPU);                           \
        return;                                                         \
    }                                                                   \
    simm = tcg_const_i32(SIMM5(ctx->opcode));                           \
    rd = gen_avr_ptr(rD(ctx->opcode));                                  \
    gen_helper_##name (rd, simm);                                       \
    tcg_temp_free_i32(simm);                                            \
    tcg_temp_free_ptr(rd);                                              \
}

GEN_VXFORM_SIMM(vspltisb, 6, 12);
GEN_VXFORM_SIMM(vspltish, 6, 13);
GEN_VXFORM_SIMM(vspltisw, 6, 14);
/* VX-form with no vA operand (unpack / FP unary family). */
#define GEN_VXFORM_NOA(name, opc2, opc3)                                \
static void glue(gen_, name)(DisasContext *ctx)                         \
{                                                                       \
    TCGv_ptr rb, rd;                                                    \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        gen_exception(ctx, POWERPC_EXCP_VPU);                           \
        return;                                                         \
    }                                                                   \
    rb = gen_avr_ptr(rB(ctx->opcode));                                  \
    rd = gen_avr_ptr(rD(ctx->opcode));                                  \
    gen_helper_##name (rd, rb);                                         \
    tcg_temp_free_ptr(rb);                                              \
    tcg_temp_free_ptr(rd);                                              \
}

#define GEN_VXFORM_NOA_ENV(name, opc2, opc3)                            \
static void glue(gen_, name)(DisasContext *ctx)                         \
{                                                                       \
    TCGv_ptr rb, rd;                                                    \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        gen_exception(ctx, POWERPC_EXCP_VPU);                           \
        return;                                                         \
    }                                                                   \
    rb = gen_avr_ptr(rB(ctx->opcode));                                  \
    rd = gen_avr_ptr(rD(ctx->opcode));                                  \
    gen_helper_##name(cpu_env, rd, rb);                                 \
    tcg_temp_free_ptr(rb);                                              \
    tcg_temp_free_ptr(rd);                                              \
}

GEN_VXFORM_NOA(vupkhsb, 7, 8);
GEN_VXFORM_NOA(vupkhsh, 7, 9);
GEN_VXFORM_NOA(vupklsb, 7, 10);
GEN_VXFORM_NOA(vupklsh, 7, 11);
GEN_VXFORM_NOA(vupkhpx, 7, 13);
GEN_VXFORM_NOA(vupklpx, 7, 15);
GEN_VXFORM_NOA_ENV(vrefp, 5, 4);
GEN_VXFORM_NOA_ENV(vrsqrtefp, 5, 5);
GEN_VXFORM_NOA_ENV(vexptefp, 5, 6);
GEN_VXFORM_NOA_ENV(vlogefp, 5, 7);
GEN_VXFORM_NOA_ENV(vrfim, 5, 8);
GEN_VXFORM_NOA_ENV(vrfin, 5, 9);
GEN_VXFORM_NOA_ENV(vrfip, 5, 10);
GEN_VXFORM_NOA_ENV(vrfiz, 5, 11);
/* NOTE(review): identical redefinition of GEN_VXFORM_SIMM (legal in C
 * since the replacement list matches, and benign) — consider removing. */
#define GEN_VXFORM_SIMM(name, opc2, opc3)                               \
static void glue(gen_, name)(DisasContext *ctx)                         \
{                                                                       \
    TCGv_ptr rd;                                                        \
    TCGv_i32 simm;                                                      \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        gen_exception(ctx, POWERPC_EXCP_VPU);                           \
        return;                                                         \
    }                                                                   \
    simm = tcg_const_i32(SIMM5(ctx->opcode));                           \
    rd = gen_avr_ptr(rD(ctx->opcode));                                  \
    gen_helper_##name (rd, simm);                                       \
    tcg_temp_free_i32(simm);                                            \
    tcg_temp_free_ptr(rd);                                              \
}
/* VX-form with a 5-bit unsigned immediate (splat / convert family). */
#define GEN_VXFORM_UIMM(name, opc2, opc3)                               \
static void glue(gen_, name)(DisasContext *ctx)                         \
{                                                                       \
    TCGv_ptr rb, rd;                                                    \
    TCGv_i32 uimm;                                                      \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        gen_exception(ctx, POWERPC_EXCP_VPU);                           \
        return;                                                         \
    }                                                                   \
    uimm = tcg_const_i32(UIMM5(ctx->opcode));                           \
    rb = gen_avr_ptr(rB(ctx->opcode));                                  \
    rd = gen_avr_ptr(rD(ctx->opcode));                                  \
    gen_helper_##name (rd, rb, uimm);                                   \
    tcg_temp_free_i32(uimm);                                            \
    tcg_temp_free_ptr(rb);                                              \
    tcg_temp_free_ptr(rd);                                              \
}

#define GEN_VXFORM_UIMM_ENV(name, opc2, opc3)                           \
static void glue(gen_, name)(DisasContext *ctx)                         \
{                                                                       \
    TCGv_ptr rb, rd;                                                    \
    TCGv_i32 uimm;                                                      \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        gen_exception(ctx, POWERPC_EXCP_VPU);                           \
        return;                                                         \
    }                                                                   \
    uimm = tcg_const_i32(UIMM5(ctx->opcode));                           \
    rb = gen_avr_ptr(rB(ctx->opcode));                                  \
    rd = gen_avr_ptr(rD(ctx->opcode));                                  \
    gen_helper_##name(cpu_env, rd, rb, uimm);                           \
    tcg_temp_free_i32(uimm);                                            \
    tcg_temp_free_ptr(rb);                                              \
    tcg_temp_free_ptr(rd);                                              \
}

GEN_VXFORM_UIMM(vspltb, 6, 8);
GEN_VXFORM_UIMM(vsplth, 6, 9);
GEN_VXFORM_UIMM(vspltw, 6, 10);
GEN_VXFORM_UIMM_ENV(vcfux, 5, 12);
GEN_VXFORM_UIMM_ENV(vcfsx, 5, 13);
GEN_VXFORM_UIMM_ENV(vctuxs, 5, 14);
GEN_VXFORM_UIMM_ENV(vctsxs, 5, 15);
/* vsldoi: shift-left double by octet immediate. */
static void gen_vsldoi(DisasContext *ctx)
{
    TCGv_ptr ra, rb, rd;
    TCGv_i32 sh;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    ra = gen_avr_ptr(rA(ctx->opcode));
    rb = gen_avr_ptr(rB(ctx->opcode));
    rd = gen_avr_ptr(rD(ctx->opcode));
    sh = tcg_const_i32(VSH(ctx->opcode));
    gen_helper_vsldoi (rd, ra, rb, sh);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);
    tcg_temp_free_ptr(rd);
    tcg_temp_free_i32(sh);
}
#define GEN_VAFORM_PAIRED(name0, name1, opc2) \
6923
static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
6925
TCGv_ptr ra, rb, rc, rd; \
6926
if (unlikely(!ctx->altivec_enabled)) { \
6927
gen_exception(ctx, POWERPC_EXCP_VPU); \
6930
ra = gen_avr_ptr(rA(ctx->opcode)); \
6931
rb = gen_avr_ptr(rB(ctx->opcode)); \
6932
rc = gen_avr_ptr(rC(ctx->opcode)); \
6933
rd = gen_avr_ptr(rD(ctx->opcode)); \
6934
if (Rc(ctx->opcode)) { \
6935
gen_helper_##name1(cpu_env, rd, ra, rb, rc); \
6937
gen_helper_##name0(cpu_env, rd, ra, rb, rc); \
6939
tcg_temp_free_ptr(ra); \
6940
tcg_temp_free_ptr(rb); \
6941
tcg_temp_free_ptr(rc); \
6942
tcg_temp_free_ptr(rd); \
6945
GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16)
6947
/* vmladduhm: multiply-low and add unsigned halfword modulo. */
static void gen_vmladduhm(DisasContext *ctx)
{
    TCGv_ptr ra, rb, rc, rd;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    ra = gen_avr_ptr(rA(ctx->opcode));
    rb = gen_avr_ptr(rB(ctx->opcode));
    rc = gen_avr_ptr(rC(ctx->opcode));
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_vmladduhm(rd, ra, rb, rc);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);
    tcg_temp_free_ptr(rc);
    tcg_temp_free_ptr(rd);
}
GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18)
GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19)
GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20)
GEN_VAFORM_PAIRED(vsel, vperm, 21)
GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23)
/*** SPE extension ***/
6972
/* Register moves */
6975
/* evmra: copy rA to rD and to the SPE accumulator. */
static inline void gen_evmra(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

#if defined(TARGET_PPC64)
    /* rD := rA */
    tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);

    /* spe_acc := rA */
    tcg_gen_st_i64(cpu_gpr[rA(ctx->opcode)],
                   cpu_env,
                   offsetof(CPUPPCState, spe_acc));
#else
    TCGv_i64 tmp = tcg_temp_new_i64();

    /* tmp := rA_lo + rA_hi << 32 */
    tcg_gen_concat_i32_i64(tmp, cpu_gpr[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);

    /* spe_acc := tmp */
    tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));
    tcg_temp_free_i64(tmp);

    /* rD := rA */
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
#endif
}
/* Assemble the full 64-bit SPE view of a GPR into t. */
static inline void gen_load_gpr64(TCGv_i64 t, int reg)
{
#if defined(TARGET_PPC64)
    tcg_gen_mov_i64(t, cpu_gpr[reg]);
#else
    tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
#endif
}
/* Scatter a 64-bit value back into the (split, on 32-bit) GPR pair. */
static inline void gen_store_gpr64(int reg, TCGv_i64 t)
{
#if defined(TARGET_PPC64)
    tcg_gen_mov_i64(cpu_gpr[reg], t);
#else
    TCGv_i64 tmp = tcg_temp_new_i64();
    tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
    tcg_gen_shri_i64(tmp, t, 32);
    tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
    tcg_temp_free_i64(tmp);
#endif
}
#define GEN_SPE(name0, name1, opc2, opc3, inval0, inval1, type) \
7030
static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
7032
if (Rc(ctx->opcode)) \
7038
/* Handler for undefined SPE opcodes */
7039
static inline void gen_speundef(DisasContext *ctx)
7041
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7045
#if defined(TARGET_PPC64)
7046
#define GEN_SPEOP_LOGIC2(name, tcg_op) \
7047
static inline void gen_##name(DisasContext *ctx) \
7049
if (unlikely(!ctx->spe_enabled)) { \
7050
gen_exception(ctx, POWERPC_EXCP_SPEU); \
7053
tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7054
cpu_gpr[rB(ctx->opcode)]); \
7057
#define GEN_SPEOP_LOGIC2(name, tcg_op) \
7058
static inline void gen_##name(DisasContext *ctx) \
7060
if (unlikely(!ctx->spe_enabled)) { \
7061
gen_exception(ctx, POWERPC_EXCP_SPEU); \
7064
tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7065
cpu_gpr[rB(ctx->opcode)]); \
7066
tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
7067
cpu_gprh[rB(ctx->opcode)]); \
7071
GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
7072
GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
7073
GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
7074
GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
7075
GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
7076
GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
7077
GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
7078
GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);
7080
/* SPE logic immediate */
7081
#if defined(TARGET_PPC64)
7082
#define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
7083
static inline void gen_##name(DisasContext *ctx) \
7085
if (unlikely(!ctx->spe_enabled)) { \
7086
gen_exception(ctx, POWERPC_EXCP_SPEU); \
7089
TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7090
TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7091
TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7092
tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7093
tcg_opi(t0, t0, rB(ctx->opcode)); \
7094
tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
7095
tcg_gen_trunc_i64_i32(t1, t2); \
7096
tcg_temp_free_i64(t2); \
7097
tcg_opi(t1, t1, rB(ctx->opcode)); \
7098
tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
7099
tcg_temp_free_i32(t0); \
7100
tcg_temp_free_i32(t1); \
7103
#define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
7104
static inline void gen_##name(DisasContext *ctx) \
7106
if (unlikely(!ctx->spe_enabled)) { \
7107
gen_exception(ctx, POWERPC_EXCP_SPEU); \
7110
tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7112
tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
7116
GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
7117
GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
7118
GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
7119
GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
7121
/* SPE arithmetic */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH1(name, tcg_op)                                  \
static inline void gen_##name(DisasContext *ctx)                        \
{                                                                       \
    if (unlikely(!ctx->spe_enabled)) {                                  \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                          \
        return;                                                         \
    }                                                                   \
    TCGv_i32 t0 = tcg_temp_local_new_i32();                             \
    TCGv_i32 t1 = tcg_temp_local_new_i32();                             \
    TCGv_i64 t2 = tcg_temp_local_new_i64();                             \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                \
    tcg_op(t0, t0);                                                     \
    tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32);                 \
    tcg_gen_trunc_i64_i32(t1, t2);                                      \
    tcg_temp_free_i64(t2);                                              \
    tcg_op(t1, t1);                                                     \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);           \
    tcg_temp_free_i32(t0);                                              \
    tcg_temp_free_i32(t1);                                              \
}
#else
#define GEN_SPEOP_ARITH1(name, tcg_op)                                  \
static inline void gen_##name(DisasContext *ctx)                        \
{                                                                       \
    if (unlikely(!ctx->spe_enabled)) {                                  \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                          \
        return;                                                         \
    }                                                                   \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);         \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);       \
}
#endif
/* Per-word absolute value used by evabs. */
static inline void gen_op_evabs(TCGv_i32 ret, TCGv_i32 arg1)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();

    tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
    tcg_gen_neg_i32(ret, arg1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_i32(ret, arg1);
    gen_set_label(l2);
}
GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
/* Per-word round used by evrndw: round to nearest 64K, keep low 16 bits clear. */
static inline void gen_op_evrndw(TCGv_i32 ret, TCGv_i32 arg1)
{
    tcg_gen_addi_i32(ret, arg1, 0x8000);
    tcg_gen_ext16u_i32(ret, ret);
}
GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
/* Two-operand per-word SPE arithmetic, applied to both vector halves. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH2(name, tcg_op)                                  \
static inline void gen_##name(DisasContext *ctx)                        \
{                                                                       \
    if (unlikely(!ctx->spe_enabled)) {                                  \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                          \
        return;                                                         \
    }                                                                   \
    TCGv_i32 t0 = tcg_temp_local_new_i32();                             \
    TCGv_i32 t1 = tcg_temp_local_new_i32();                             \
    TCGv_i32 t2 = tcg_temp_local_new_i32();                             \
    TCGv_i64 t3 = tcg_temp_local_new_i64();                             \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                \
    tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]);                \
    tcg_op(t0, t0, t2);                                                 \
    tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32);                 \
    tcg_gen_trunc_i64_i32(t1, t3);                                      \
    tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32);                 \
    tcg_gen_trunc_i64_i32(t2, t3);                                      \
    tcg_temp_free_i64(t3);                                              \
    tcg_op(t1, t1, t2);                                                 \
    tcg_temp_free_i32(t2);                                              \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);           \
    tcg_temp_free_i32(t0);                                              \
    tcg_temp_free_i32(t1);                                              \
}
#else
#define GEN_SPEOP_ARITH2(name, tcg_op)                                  \
static inline void gen_##name(DisasContext *ctx)                        \
{                                                                       \
    if (unlikely(!ctx->spe_enabled)) {                                  \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                          \
        return;                                                         \
    }                                                                   \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],          \
           cpu_gpr[rB(ctx->opcode)]);                                   \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],        \
           cpu_gprh[rB(ctx->opcode)]);                                  \
}
#endif
/* Logical shift right; shift counts >= 32 yield 0. */
static inline void gen_op_evsrwu(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    t0 = tcg_temp_local_new_i32();
    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_shr_i32(ret, arg1, t0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i32(ret, 0);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
/* Arithmetic shift right; shift counts >= 32 yield 0 (SPE semantics). */
static inline void gen_op_evsrws(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    t0 = tcg_temp_local_new_i32();
    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_sar_i32(ret, arg1, t0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i32(ret, 0);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
/* Shift left; shift counts >= 32 yield 0. */
static inline void gen_op_evslw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    t0 = tcg_temp_local_new_i32();
    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_shl_i32(ret, arg1, t0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i32(ret, 0);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
/* Rotate left; only the low 5 bits of the count are used. */
static inline void gen_op_evrlw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, arg2, 0x1F);
    tcg_gen_rotl_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
/* evmergehi: rD_hi := rA_hi, rD_lo := rB_hi. */
static inline void gen_evmergehi(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
    /* BUGFIX: mask was the truncated constant 0xFFFFFFFF0000000ULL
     * (15 hex digits); it must cover the full upper 32 bits, matching
     * the mask used by gen_evsel. */
    tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
#endif
}
GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
7307
static inline void gen_op_evsubf(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
7309
tcg_gen_sub_i32(ret, arg2, arg1);
7311
GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);
7313
/* SPE arithmetic immediate */
7314
#if defined(TARGET_PPC64)
7315
#define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
7316
static inline void gen_##name(DisasContext *ctx) \
7318
if (unlikely(!ctx->spe_enabled)) { \
7319
gen_exception(ctx, POWERPC_EXCP_SPEU); \
7322
TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7323
TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7324
TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7325
tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7326
tcg_op(t0, t0, rA(ctx->opcode)); \
7327
tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
7328
tcg_gen_trunc_i64_i32(t1, t2); \
7329
tcg_temp_free_i64(t2); \
7330
tcg_op(t1, t1, rA(ctx->opcode)); \
7331
tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
7332
tcg_temp_free_i32(t0); \
7333
tcg_temp_free_i32(t1); \
7336
#define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
7337
static inline void gen_##name(DisasContext *ctx) \
7339
if (unlikely(!ctx->spe_enabled)) { \
7340
gen_exception(ctx, POWERPC_EXCP_SPEU); \
7343
tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
7345
tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)], \
7349
GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
7350
GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);
7352
/* SPE comparison */
7353
#if defined(TARGET_PPC64)
7354
#define GEN_SPEOP_COMP(name, tcg_cond) \
7355
static inline void gen_##name(DisasContext *ctx) \
7357
if (unlikely(!ctx->spe_enabled)) { \
7358
gen_exception(ctx, POWERPC_EXCP_SPEU); \
7361
int l1 = gen_new_label(); \
7362
int l2 = gen_new_label(); \
7363
int l3 = gen_new_label(); \
7364
int l4 = gen_new_label(); \
7365
TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7366
TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7367
TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7368
tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7369
tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7370
tcg_gen_brcond_i32(tcg_cond, t0, t1, l1); \
7371
tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \
7373
gen_set_label(l1); \
7374
tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
7375
CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
7376
gen_set_label(l2); \
7377
tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
7378
tcg_gen_trunc_i64_i32(t0, t2); \
7379
tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
7380
tcg_gen_trunc_i64_i32(t1, t2); \
7381
tcg_temp_free_i64(t2); \
7382
tcg_gen_brcond_i32(tcg_cond, t0, t1, l3); \
7383
tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7384
~(CRF_CH | CRF_CH_AND_CL)); \
7386
gen_set_label(l3); \
7387
tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7388
CRF_CH | CRF_CH_OR_CL); \
7389
gen_set_label(l4); \
7390
tcg_temp_free_i32(t0); \
7391
tcg_temp_free_i32(t1); \
7394
#define GEN_SPEOP_COMP(name, tcg_cond) \
7395
static inline void gen_##name(DisasContext *ctx) \
7397
if (unlikely(!ctx->spe_enabled)) { \
7398
gen_exception(ctx, POWERPC_EXCP_SPEU); \
7401
int l1 = gen_new_label(); \
7402
int l2 = gen_new_label(); \
7403
int l3 = gen_new_label(); \
7404
int l4 = gen_new_label(); \
7406
tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)], \
7407
cpu_gpr[rB(ctx->opcode)], l1); \
7408
tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0); \
7410
gen_set_label(l1); \
7411
tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
7412
CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
7413
gen_set_label(l2); \
7414
tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)], \
7415
cpu_gprh[rB(ctx->opcode)], l3); \
7416
tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7417
~(CRF_CH | CRF_CH_AND_CL)); \
7419
gen_set_label(l3); \
7420
tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7421
CRF_CH | CRF_CH_OR_CL); \
7422
gen_set_label(l4); \
7425
GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
7426
GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
7427
GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
7428
GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
7429
GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
7432
static inline void gen_brinc(DisasContext *ctx)
{
    /* Note: brinc is usable even if SPE is disabled */
    gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
/* evmergelo: rD_hi := rA_lo, rD_lo := rB_lo. */
static inline void gen_evmergelo(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
#endif
}
/* evmergehilo: rD_hi := rA_hi, rD_lo := rB_lo. */
static inline void gen_evmergehilo(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
    /* BUGFIX: mask was the truncated constant 0xFFFFFFFF0000000ULL
     * (15 hex digits); it must cover the full upper 32 bits. */
    tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
#endif
}
/* evmergelohi: rD_hi := rA_lo, rD_lo := rB_hi.
 * On 32-bit targets rD==rA needs a temp because rD_lo is written first. */
static inline void gen_evmergelohi(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
    tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    if (rD(ctx->opcode) == rA(ctx->opcode)) {
        TCGv_i32 tmp = tcg_temp_new_i32();
        tcg_gen_mov_i32(tmp, cpu_gpr[rA(ctx->opcode)]);
        tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
        tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], tmp);
        tcg_temp_free_i32(tmp);
    } else {
        tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
        tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    }
#endif
}
/* evsplati: splat the sign-extended 5-bit immediate into both words. */
static inline void gen_evsplati(DisasContext *ctx)
{
    uint64_t imm = ((int32_t)(rA(ctx->opcode) << 27)) >> 27;

#if defined(TARGET_PPC64)
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
#else
    tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
    tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
#endif
}
/* evsplatfi: splat the 5-bit immediate shifted into the fraction field. */
static inline void gen_evsplatfi(DisasContext *ctx)
{
    uint64_t imm = rA(ctx->opcode) << 27;

#if defined(TARGET_PPC64)
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
#else
    tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
    tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
#endif
}
static inline void gen_evsel(DisasContext *ctx)
7528
int l1 = gen_new_label();
7529
int l2 = gen_new_label();
7530
int l3 = gen_new_label();
7531
int l4 = gen_new_label();
7532
TCGv_i32 t0 = tcg_temp_local_new_i32();
7533
#if defined(TARGET_PPC64)
7534
TCGv t1 = tcg_temp_local_new();
7535
TCGv t2 = tcg_temp_local_new();
7537
tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
7538
tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
7539
#if defined(TARGET_PPC64)
7540
tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7542
tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7546
#if defined(TARGET_PPC64)
7547
tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7549
tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7552
tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
7553
tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
7554
#if defined(TARGET_PPC64)
7555
tcg_gen_ext32u_tl(t2, cpu_gpr[rA(ctx->opcode)]);
7557
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7561
#if defined(TARGET_PPC64)
7562
tcg_gen_ext32u_tl(t2, cpu_gpr[rB(ctx->opcode)]);
7564
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7567
tcg_temp_free_i32(t0);
7568
#if defined(TARGET_PPC64)
7569
tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
7575
/* The four evsel opcode slots all decode to the same operation; the CR
 * field is taken from the opcode inside gen_evsel itself. */
static void gen_evsel0(DisasContext *ctx)
{
    gen_evsel(ctx);
}

static void gen_evsel1(DisasContext *ctx)
{
    gen_evsel(ctx);
}

static void gen_evsel2(DisasContext *ctx)
{
    gen_evsel(ctx);
}

static void gen_evsel3(DisasContext *ctx)
{
    gen_evsel(ctx);
}
/* evmwumi: rD := (uint32)rA * (uint32)rB, full 64-bit product. */
static inline void gen_evmwumi(DisasContext *ctx)
{
    TCGv_i64 t0, t1;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();

    /* t0 := rA; t1 := rB (zero-extended low words) */
#if defined(TARGET_PPC64)
    tcg_gen_ext32u_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_tl(t1, cpu_gpr[rB(ctx->opcode)]);
#else
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
#endif

    tcg_gen_mul_i64(t0, t0, t1);          /* t0 := rA * rB */

    gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
/* evmwumia: evmwumi plus writing the product into the accumulator. */
static inline void gen_evmwumia(DisasContext *ctx)
{
    TCGv_i64 tmp;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    gen_evmwumi(ctx);            /* rD := rA * rB */

    tmp = tcg_temp_new_i64();

    /* acc := rD */
    gen_load_gpr64(tmp, rD(ctx->opcode));
    tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));
    tcg_temp_free_i64(tmp);
}
/* evmwumiaa: acc := acc + rA * rB (unsigned), and rD := new acc. */
static inline void gen_evmwumiaa(DisasContext *ctx)
{
    TCGv_i64 acc;
    TCGv_i64 tmp;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    gen_evmwumi(ctx);           /* rD := rA * rB */

    acc = tcg_temp_new_i64();
    tmp = tcg_temp_new_i64();

    /* tmp := rD */
    gen_load_gpr64(tmp, rD(ctx->opcode));

    /* Load acc */
    tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));

    /* acc := tmp + acc */
    tcg_gen_add_i64(acc, acc, tmp);

    /* Store acc */
    tcg_gen_st_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));

    /* rD := acc */
    gen_store_gpr64(rD(ctx->opcode), acc);

    tcg_temp_free_i64(acc);
    tcg_temp_free_i64(tmp);
}
/* evmwsmi: rD := (int32)rA * (int32)rB, full 64-bit signed product. */
static inline void gen_evmwsmi(DisasContext *ctx)
{
    TCGv_i64 t0, t1;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();

    /* t0 := rA; t1 := rB (sign-extended low words) */
#if defined(TARGET_PPC64)
    tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
#else
    tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
#endif

    tcg_gen_mul_i64(t0, t0, t1);          /* t0 := rA * rB */

    gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
/* evmwsmia: evmwsmi plus writing the product into the accumulator.
 * NOTE(review): unlike gen_evmwumia this variant has no spe_enabled
 * check here; gen_evmwsmi performs it — confirm against the original. */
static inline void gen_evmwsmia(DisasContext *ctx)
{
    TCGv_i64 tmp;

    gen_evmwsmi(ctx);            /* rD := rA * rB */

    tmp = tcg_temp_new_i64();

    /* acc := rD */
    gen_load_gpr64(tmp, rD(ctx->opcode));
    tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));

    tcg_temp_free_i64(tmp);
}
static inline void gen_evmwsmiaa(DisasContext *ctx)
7725
TCGv_i64 acc = tcg_temp_new_i64();
7726
TCGv_i64 tmp = tcg_temp_new_i64();
7728
gen_evmwsmi(ctx); /* rD := rA * rB */
7730
acc = tcg_temp_new_i64();
7731
tmp = tcg_temp_new_i64();
7734
gen_load_gpr64(tmp, rD(ctx->opcode));
7737
tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));
7739
/* acc := tmp + acc */
7740
tcg_gen_add_i64(acc, acc, tmp);
7743
tcg_gen_st_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));
7746
gen_store_gpr64(rD(ctx->opcode), acc);
7748
tcg_temp_free_i64(acc);
7749
tcg_temp_free_i64(tmp);
7752
/* Opcode table for the SPE integer and logical operations above. */
GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x0000F800, 0x00000000, PPC_SPE); //
GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(speundef, evand, 0x08, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE); ////
GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evorc, 0x0D, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE); ////
GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, 0x0000F800, PPC_SPE); //
GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, 0x0000F800, PPC_SPE);
GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, 0x00600000, PPC_SPE); ////
GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, 0x00600000, PPC_SPE); ////
GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, 0xFFFFFFFF, PPC_SPE); ////
/* SPE load and stores */
7783
static inline void gen_addr_spe_imm_index(DisasContext *ctx, TCGv EA, int sh)
7785
target_ulong uimm = rB(ctx->opcode);
7787
if (rA(ctx->opcode) == 0) {
7788
tcg_gen_movi_tl(EA, uimm << sh);
7790
tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh);
7791
if (NARROW_MODE(ctx)) {
7792
tcg_gen_ext32u_tl(EA, EA);
7797
/* evldd: load one doubleword into rD (split across gpr/gprh on 32-bit). */
static inline void gen_op_evldd(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], addr);
#else
    TCGv_i64 t0 = tcg_temp_new_i64();
    gen_qemu_ld64(ctx, t0, addr);
    tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_shri_i64(t0, t0, 32);
    tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
#endif
}
/* evldw: load two words; the first word from memory is the high word. */
static inline void gen_op_evldw(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld32u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    gen_addr_add(ctx, addr, addr, 4);
    gen_qemu_ld32u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#else
    gen_qemu_ld32u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 4);
    gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
#endif
}
/* evldh: load four halfwords into the four halfword lanes of rD. */
static inline void gen_op_evldh(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    /* BUG FIX(review): the third halfword starts the LOW word, so the
     * shift must target cpu_gpr, not cpu_gprh as the original had. */
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evlhhesplat: load one halfword and splat it into the even (high)
 * halfword of both words of rD; odd halfwords are zero. */
static inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evlhhousplat: load one halfword (zero-extended) and splat it into the
 * odd (low) halfword of both words of rD. */
static inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evlhhossplat: load one halfword (sign-extended to a word) and splat
 * it into both words of rD. */
static inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16s(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    /* Drop the sign-extension bits before merging into the low word. */
    tcg_gen_ext32u_tl(t0, t0);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evlwhe: load two halfwords into the even (high) halfword of each word
 * of rD; the low halfwords become zero. */
static inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
#endif
    tcg_temp_free(t0);
}
/* evlwhou: load two halfwords, zero-extended into the two words of rD.
 * NOTE(review): on PPC64 the first halfword lands in the LOW 32 bits and
 * the second is shifted into the high bits — the opposite ordering from
 * gen_op_evldw above; kept as in the original, confirm against the SPE
 * spec before changing. */
static inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#else
    gen_qemu_ld16u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
#endif
}
/* evlwhos: load two halfwords, sign-extended into the two words of rD.
 * NOTE(review): same first-halfword-in-low-bits ordering as evlwhou —
 * kept as in the original. */
static inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16s(ctx, t0, addr);
    /* Keep only the sign-extended word before merging. */
    tcg_gen_ext32u_tl(cpu_gpr[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16s(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#else
    gen_qemu_ld16s(ctx, cpu_gprh[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16s(ctx, cpu_gpr[rD(ctx->opcode)], addr);
#endif
}
/* evlwwsplat: load one word and splat it into both words of rD. */
static inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld32u(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evlwhsplat: load two halfwords, splatting each into both halves of its
 * destination word. */
static inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    tcg_gen_shli_tl(t0, t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
    /* BUG FIX(review): the original OR'd cpu_gprh into the low word here,
     * mixing the first halfword into the second word; the splat of the
     * second halfword must combine cpu_gpr with t0. Confirm vs. spec. */
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evstdd: store rS as one doubleword (gprh is the high word on 32-bit). */
static inline void gen_op_evstdd(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], addr);
#else
    TCGv_i64 t0 = tcg_temp_new_i64();
    tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)],
                           cpu_gprh[rS(ctx->opcode)]);
    gen_qemu_st64(ctx, t0, addr);
    tcg_temp_free_i64(t0);
#endif
}
/* evstdw: store the high word of rS, then the low word. */
static inline void gen_op_evstdw(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st32(ctx, t0, addr);
    tcg_temp_free(t0);
#else
    gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
    gen_addr_add(ctx, addr, addr, 4);
    gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
/* evstdh: store the four halfword lanes of rS, most significant first. */
static inline void gen_op_evstdh(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
#else
    tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
#endif
    gen_qemu_st16(ctx, t0, addr);
    gen_addr_add(ctx, addr, addr, 2);
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st16(ctx, t0, addr);
#else
    gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
    gen_addr_add(ctx, addr, addr, 2);
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
    gen_qemu_st16(ctx, t0, addr);
    tcg_temp_free(t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
/* evstwhe: store the even (high) halfword of each word of rS. */
static inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
#else
    tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
#endif
    gen_qemu_st16(ctx, t0, addr);
    gen_addr_add(ctx, addr, addr, 2);
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
    gen_qemu_st16(ctx, t0, addr);
    tcg_temp_free(t0);
}
/* evstwho: store the odd (low) halfword of each word of rS. */
static inline void gen_op_evstwho(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st16(ctx, t0, addr);
    tcg_temp_free(t0);
#else
    gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
/* evstwwe: store the high word of rS. */
static inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st32(ctx, t0, addr);
    tcg_temp_free(t0);
#else
    gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
}
/* evstwwo: store the low word of rS. */
static inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr)
{
    gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
/* Wrap an SPE load/store body with SPE-enable check and EA computation:
 * Rc selects immediate-index addressing (UIMM << sh) vs. register index. */
#define GEN_SPEOP_LDST(name, opc2, sh)                                        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0;                                                                  \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    t0 = tcg_temp_new();                                                      \
    if (Rc(ctx->opcode)) {                                                    \
        gen_addr_spe_imm_index(ctx, t0, sh);                                  \
    } else {                                                                  \
        gen_addr_reg_index(ctx, t0);                                          \
    }                                                                         \
    gen_op_##name(ctx, t0);                                                   \
    tcg_temp_free(t0);                                                        \
}
GEN_SPEOP_LDST(evldd, 0x00, 3);
GEN_SPEOP_LDST(evldw, 0x01, 3);
GEN_SPEOP_LDST(evldh, 0x02, 3);
GEN_SPEOP_LDST(evlhhesplat, 0x04, 1);
GEN_SPEOP_LDST(evlhhousplat, 0x06, 1);
GEN_SPEOP_LDST(evlhhossplat, 0x07, 1);
GEN_SPEOP_LDST(evlwhe, 0x08, 2);
GEN_SPEOP_LDST(evlwhou, 0x0A, 2);
GEN_SPEOP_LDST(evlwhos, 0x0B, 2);
GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2);
GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2);

GEN_SPEOP_LDST(evstdd, 0x10, 3);
GEN_SPEOP_LDST(evstdw, 0x11, 3);
GEN_SPEOP_LDST(evstdh, 0x12, 3);
GEN_SPEOP_LDST(evstwhe, 0x18, 2);
GEN_SPEOP_LDST(evstwho, 0x1A, 2);
GEN_SPEOP_LDST(evstwwe, 0x1C, 2);
GEN_SPEOP_LDST(evstwwo, 0x1E, 2);
/* Multiply and add - TODO */
8137
GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);//
8138
GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8139
GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, 0x00000000, PPC_SPE);
8140
GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8141
GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, 0x00000000, PPC_SPE);
8142
GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8143
GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8144
GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8145
GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, 0x00000000, PPC_SPE);
8146
GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8147
GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, 0x00000000, PPC_SPE);
8148
GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8150
GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8151
GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, 0xFFFFFFFF, PPC_SPE);
8152
GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, 0x00000000, PPC_SPE);
8153
GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8154
GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8155
GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8156
GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8157
GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, 0xFFFFFFFF, PPC_SPE);
8158
GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, 0x00000000, PPC_SPE);
8159
GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8160
GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8161
GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8163
GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
8164
GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
8165
GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
8166
GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
8167
GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, 0x00000000, PPC_SPE);
8169
GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8170
GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8171
GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8172
GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8173
GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8174
GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8175
GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8176
GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8177
GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8178
GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8179
GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8180
GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8182
GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, 0x00000000, PPC_SPE);
8183
GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, 0x00000000, PPC_SPE);
8184
GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8185
GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8187
GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8188
GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8189
GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8190
GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8191
GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8192
GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8193
GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8194
GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8195
GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8196
GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8197
GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8198
GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8200
GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, 0x00000000, PPC_SPE);
8201
GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, 0x00000000, PPC_SPE);
8202
GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8203
GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, 0x00000000, PPC_SPE);
8204
GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8207
/*** SPE floating-point extension ***/
/* Helper-call wrappers for the SPE FP ops. On 64-bit targets a 32-bit
 * result must be merged back into the low word of the 64-bit GPR while
 * preserving the high word; on 32-bit targets 64-bit values travel
 * through gen_load_gpr64/gen_store_gpr64 (gpr/gprh pairs). */
#if defined(TARGET_PPC64)
#define GEN_SPEFPUOP_CONV_32_32(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i32 t0;                                                              \
    TCGv t1;                                                                  \
    t0 = tcg_temp_new_i32();                                                  \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]);                       \
    gen_helper_##name(t0, cpu_env, t0);                                       \
    t1 = tcg_temp_new();                                                      \
    tcg_gen_extu_i32_tl(t1, t0);                                              \
    tcg_temp_free_i32(t0);                                                    \
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)],       \
                    0xFFFFFFFF00000000ULL);                                   \
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1);    \
    tcg_temp_free(t1);                                                        \
}
#define GEN_SPEFPUOP_CONV_32_64(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i32 t0;                                                              \
    TCGv t1;                                                                  \
    t0 = tcg_temp_new_i32();                                                  \
    gen_helper_##name(t0, cpu_env, cpu_gpr[rB(ctx->opcode)]);                 \
    t1 = tcg_temp_new();                                                      \
    tcg_gen_extu_i32_tl(t1, t0);                                              \
    tcg_temp_free_i32(t0);                                                    \
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)],       \
                    0xFFFFFFFF00000000ULL);                                   \
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1);    \
    tcg_temp_free(t1);                                                        \
}
#define GEN_SPEFPUOP_CONV_64_32(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i32 t0 = tcg_temp_new_i32();                                         \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]);                       \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);                 \
    tcg_temp_free_i32(t0);                                                    \
}
#define GEN_SPEFPUOP_CONV_64_64(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                      cpu_gpr[rB(ctx->opcode)]);                              \
}
#define GEN_SPEFPUOP_ARITH2_32_32(name)                                       \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i32 t0, t1;                                                          \
    TCGv t2;                                                                  \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i32();                                                  \
    t1 = tcg_temp_new_i32();                                                  \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);                       \
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);                       \
    gen_helper_##name(t0, cpu_env, t0, t1);                                   \
    tcg_temp_free_i32(t1);                                                    \
    t2 = tcg_temp_new();                                                      \
    tcg_gen_extu_i32_tl(t2, t0);                                              \
    tcg_temp_free_i32(t0);                                                    \
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)],       \
                    0xFFFFFFFF00000000ULL);                                   \
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t2);    \
    tcg_temp_free(t2);                                                        \
}
#define GEN_SPEFPUOP_ARITH2_64_64(name)                                       \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#define GEN_SPEFPUOP_COMP_32(name)                                            \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i32 t0, t1;                                                          \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i32();                                                  \
    t1 = tcg_temp_new_i32();                                                  \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);                       \
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);                       \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, t0, t1);           \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
#define GEN_SPEFPUOP_COMP_64(name)                                            \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env,                    \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#else
#define GEN_SPEFPUOP_CONV_32_32(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                      cpu_gpr[rB(ctx->opcode)]);                              \
}
#define GEN_SPEFPUOP_CONV_32_64(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i64 t0 = tcg_temp_new_i64();                                         \
    gen_load_gpr64(t0, rB(ctx->opcode));                                      \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);                 \
    tcg_temp_free_i64(t0);                                                    \
}
#define GEN_SPEFPUOP_CONV_64_32(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i64 t0 = tcg_temp_new_i64();                                         \
    gen_helper_##name(t0, cpu_env, cpu_gpr[rB(ctx->opcode)]);                 \
    gen_store_gpr64(rD(ctx->opcode), t0);                                     \
    tcg_temp_free_i64(t0);                                                    \
}
#define GEN_SPEFPUOP_CONV_64_64(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i64 t0 = tcg_temp_new_i64();                                         \
    gen_load_gpr64(t0, rB(ctx->opcode));                                      \
    gen_helper_##name(t0, cpu_env, t0);                                       \
    gen_store_gpr64(rD(ctx->opcode), t0);                                     \
    tcg_temp_free_i64(t0);                                                    \
}
#define GEN_SPEFPUOP_ARITH2_32_32(name)                                       \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#define GEN_SPEFPUOP_ARITH2_64_64(name)                                       \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i64 t0, t1;                                                          \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i64();                                                  \
    t1 = tcg_temp_new_i64();                                                  \
    gen_load_gpr64(t0, rA(ctx->opcode));                                      \
    gen_load_gpr64(t1, rB(ctx->opcode));                                      \
    gen_helper_##name(t0, cpu_env, t0, t1);                                   \
    gen_store_gpr64(rD(ctx->opcode), t0);                                     \
    tcg_temp_free_i64(t0);                                                    \
    tcg_temp_free_i64(t1);                                                    \
}
#define GEN_SPEFPUOP_COMP_32(name)                                            \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env,                    \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#define GEN_SPEFPUOP_COMP_64(name)                                            \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i64 t0, t1;                                                          \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_SPEU);                                \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i64();                                                  \
    t1 = tcg_temp_new_i64();                                                  \
    gen_load_gpr64(t0, rA(ctx->opcode));                                      \
    gen_load_gpr64(t1, rB(ctx->opcode));                                      \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, t0, t1);           \
    tcg_temp_free_i64(t0);                                                    \
    tcg_temp_free_i64(t1);                                                    \
}
#endif
/* Single precision floating-point vectors operations */
/* Arithmetic */
GEN_SPEFPUOP_ARITH2_64_64(evfsadd);
GEN_SPEFPUOP_ARITH2_64_64(evfssub);
GEN_SPEFPUOP_ARITH2_64_64(evfsmul);
GEN_SPEFPUOP_ARITH2_64_64(evfsdiv);
/* evfsabs: clear the sign bit of both single-precision words. */
static inline void gen_evfsabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    ~0x8000000080000000LL);
#else
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    ~0x80000000);
    tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                    ~0x80000000);
#endif
}
/* evfsnabs: set the sign bit of both single-precision words. */
static inline void gen_evfsnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   0x8000000080000000LL);
#else
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   0x80000000);
    tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                   0x80000000);
#endif
}
/* evfsneg: flip the sign bit of both single-precision words. */
static inline void gen_evfsneg(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    0x8000000080000000LL);
#else
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    0x80000000);
    tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                    0x80000000);
#endif
}
/* Conversion */
GEN_SPEFPUOP_CONV_64_64(evfscfui);
GEN_SPEFPUOP_CONV_64_64(evfscfsi);
GEN_SPEFPUOP_CONV_64_64(evfscfuf);
GEN_SPEFPUOP_CONV_64_64(evfscfsf);
GEN_SPEFPUOP_CONV_64_64(evfsctui);
GEN_SPEFPUOP_CONV_64_64(evfsctsi);
GEN_SPEFPUOP_CONV_64_64(evfsctuf);
GEN_SPEFPUOP_CONV_64_64(evfsctsf);
GEN_SPEFPUOP_CONV_64_64(evfsctuiz);
GEN_SPEFPUOP_CONV_64_64(evfsctsiz);

/* Comparison */
GEN_SPEFPUOP_COMP_64(evfscmpgt);
GEN_SPEFPUOP_COMP_64(evfscmplt);
GEN_SPEFPUOP_COMP_64(evfscmpeq);
GEN_SPEFPUOP_COMP_64(evfststgt);
GEN_SPEFPUOP_COMP_64(evfststlt);
GEN_SPEFPUOP_COMP_64(evfststeq);
/* Opcodes definitions */
8467
GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
8468
GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE); //
8469
GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8470
GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
8471
GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
8472
GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8473
GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8474
GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8475
GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8476
GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8477
GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8478
GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8479
GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
8480
GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8482
/* Single precision floating-point operations */
/* Arithmetic */
GEN_SPEFPUOP_ARITH2_32_32(efsadd);
GEN_SPEFPUOP_ARITH2_32_32(efssub);
GEN_SPEFPUOP_ARITH2_32_32(efsmul);
GEN_SPEFPUOP_ARITH2_32_32(efsdiv);
/*
 * efsabs: single-precision FP absolute value.
 * Clears the IEEE sign bit (bit 31) of rA into rD.  Raises the
 * SPE-unavailable exception instead when SPE is disabled.
 */
static inline void gen_efsabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    /* Mask off the sign bit of the (low) 32-bit word */
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    (target_long)~0x80000000LL);
}
/*
 * efsnabs: single-precision FP negative absolute value.
 * Forces the IEEE sign bit (bit 31) of rA set into rD.  Raises the
 * SPE-unavailable exception instead when SPE is disabled.
 */
static inline void gen_efsnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    /* Set the sign bit of the (low) 32-bit word */
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   0x80000000);
}
/*
 * efsneg: single-precision FP negate.
 * Toggles the IEEE sign bit (bit 31) of rA into rD.  Raises the
 * SPE-unavailable exception instead when SPE is disabled.
 */
static inline void gen_efsneg(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    /* Flip the sign bit of the (low) 32-bit word */
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    0x80000000);
}
/* Single-precision conversions */
GEN_SPEFPUOP_CONV_32_32(efscfui);
GEN_SPEFPUOP_CONV_32_32(efscfsi);
GEN_SPEFPUOP_CONV_32_32(efscfuf);
GEN_SPEFPUOP_CONV_32_32(efscfsf);
GEN_SPEFPUOP_CONV_32_32(efsctui);
GEN_SPEFPUOP_CONV_32_32(efsctsi);
GEN_SPEFPUOP_CONV_32_32(efsctuf);
GEN_SPEFPUOP_CONV_32_32(efsctsf);
GEN_SPEFPUOP_CONV_32_32(efsctuiz);
GEN_SPEFPUOP_CONV_32_32(efsctsiz);
GEN_SPEFPUOP_CONV_32_64(efscfd);

/* Single-precision comparisons (set a CR field) */
GEN_SPEFPUOP_COMP_32(efscmpgt);
GEN_SPEFPUOP_COMP_32(efscmplt);
GEN_SPEFPUOP_COMP_32(efscmpeq);
GEN_SPEFPUOP_COMP_32(efststgt);
GEN_SPEFPUOP_COMP_32(efststlt);
GEN_SPEFPUOP_COMP_32(efststeq);
/* Opcodes definitions */
8535
GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
8536
GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE); //
8537
GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8538
GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
8539
GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
8540
GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, 0x00180000, PPC_SPE_SINGLE); //
8541
GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8542
GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8543
GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8544
GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8545
GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8546
GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8547
GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
8548
GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8550
/* Double precision floating-point operations */
GEN_SPEFPUOP_ARITH2_64_64(efdadd);
GEN_SPEFPUOP_ARITH2_64_64(efdsub);
GEN_SPEFPUOP_ARITH2_64_64(efdmul);
GEN_SPEFPUOP_ARITH2_64_64(efddiv);
/*
 * efdabs: double-precision FP absolute value.
 * Clears the IEEE double sign bit (bit 63) of rA into rD.  On 32-bit
 * targets the 64-bit operand is split across the GPR (low word) and
 * the GPRh shadow (high word), so the sign bit lives in GPRh bit 31.
 * Raises the SPE-unavailable exception instead when SPE is disabled.
 */
static inline void gen_efdabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    ~0x8000000000000000LL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                    ~0x80000000);
#endif
}
/*
 * efdnabs: double-precision FP negative absolute value.
 * Forces the IEEE double sign bit (bit 63) of rA set into rD.  On
 * 32-bit targets the sign bit is bit 31 of the GPRh (high-word) half.
 * Raises the SPE-unavailable exception instead when SPE is disabled.
 */
static inline void gen_efdnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   0x8000000000000000LL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                   0x80000000);
#endif
}
/*
 * efdneg: double-precision FP negate.
 * Toggles the IEEE double sign bit (bit 63) of rA into rD.  On 32-bit
 * targets the sign bit is bit 31 of the GPRh (high-word) half.
 * Raises the SPE-unavailable exception instead when SPE is disabled.
 */
static inline void gen_efdneg(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    0x8000000000000000LL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                    0x80000000);
#endif
}
/* Double-precision conversions */
GEN_SPEFPUOP_CONV_64_32(efdcfui);
GEN_SPEFPUOP_CONV_64_32(efdcfsi);
GEN_SPEFPUOP_CONV_64_32(efdcfuf);
GEN_SPEFPUOP_CONV_64_32(efdcfsf);
GEN_SPEFPUOP_CONV_32_64(efdctui);
GEN_SPEFPUOP_CONV_32_64(efdctsi);
GEN_SPEFPUOP_CONV_32_64(efdctuf);
GEN_SPEFPUOP_CONV_32_64(efdctsf);
GEN_SPEFPUOP_CONV_32_64(efdctuiz);
GEN_SPEFPUOP_CONV_32_64(efdctsiz);
GEN_SPEFPUOP_CONV_64_32(efdcfs);
GEN_SPEFPUOP_CONV_64_64(efdcfuid);
GEN_SPEFPUOP_CONV_64_64(efdcfsid);
GEN_SPEFPUOP_CONV_64_64(efdctuidz);
GEN_SPEFPUOP_CONV_64_64(efdctsidz);

/* Double-precision comparisons (set a CR field) */
GEN_SPEFPUOP_COMP_64(efdcmpgt);
GEN_SPEFPUOP_COMP_64(efdcmplt);
GEN_SPEFPUOP_COMP_64(efdcmpeq);
GEN_SPEFPUOP_COMP_64(efdtstgt);
GEN_SPEFPUOP_COMP_64(efdtstlt);
GEN_SPEFPUOP_COMP_64(efdtsteq);
/* Opcodes definitions */
8622
GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE); //
8623
GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8624
GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_DOUBLE); //
8625
GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
8626
GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE); //
8627
GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8628
GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE); //
8629
GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, 0x00180000, PPC_SPE_DOUBLE); //
8630
GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8631
GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8632
GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8633
GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8634
GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
8635
GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
8636
GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE); //
8637
GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
8639
static opcode_t opcodes[] = {
8640
GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
8641
GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
8642
GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8643
GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER),
8644
GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8645
GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
8646
GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
8647
GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8648
GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8649
GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8650
GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8651
GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
8652
GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
8653
GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
8654
GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
8655
GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8656
#if defined(TARGET_PPC64)
8657
GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
8659
GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
8660
GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
8661
GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8662
GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8663
GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8664
GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
8665
GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
8666
GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
8667
GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8668
GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8669
GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8670
GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8671
GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB),
8672
GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
8673
GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
8674
#if defined(TARGET_PPC64)
8675
GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
8676
GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
8677
GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
8679
GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8680
GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8681
GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8682
GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
8683
GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
8684
GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
8685
GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
8686
#if defined(TARGET_PPC64)
8687
GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
8688
GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
8689
GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
8690
GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
8691
GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
8693
GEN_HANDLER(frsqrtes, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES),
8694
GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8695
GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8696
GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT),
8697
GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT),
8698
GEN_HANDLER(fabs, 0x3F, 0x08, 0x08, 0x001F0000, PPC_FLOAT),
8699
GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT),
8700
GEN_HANDLER(fnabs, 0x3F, 0x08, 0x04, 0x001F0000, PPC_FLOAT),
8701
GEN_HANDLER(fneg, 0x3F, 0x08, 0x01, 0x001F0000, PPC_FLOAT),
8702
GEN_HANDLER_E(fcpsgn, 0x3F, 0x08, 0x00, 0x00000000, PPC_NONE, PPC2_ISA205),
8703
GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT),
8704
GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT),
8705
GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT),
8706
GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT),
8707
GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x00000000, PPC_FLOAT),
8708
GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006e0800, PPC_FLOAT),
8709
#if defined(TARGET_PPC64)
8710
GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
8711
GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
8712
GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
8714
GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8715
GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8716
GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
8717
GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
8718
GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
8719
GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
8720
GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO),
8721
GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
8722
GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
8723
GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
8724
#if defined(TARGET_PPC64)
8725
GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
8726
GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
8728
GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
8729
GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
8730
GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8731
GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8732
GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
8733
GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
8734
GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
8735
GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
8736
#if defined(TARGET_PPC64)
8737
GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
8738
GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
8740
GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW),
8741
GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
8742
GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8743
#if defined(TARGET_PPC64)
8744
GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
8745
GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
8747
GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
8748
GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
8749
GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
8750
GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
8751
GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
8752
GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
8753
#if defined(TARGET_PPC64)
8754
GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
8756
GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC),
8757
GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC),
8758
GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
8759
GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
8760
GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
8761
GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE),
8762
GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE),
8763
GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
8764
GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
8765
GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC),
8766
GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
8767
GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
8768
GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
8769
GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
8770
GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
8771
GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
8772
GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
8773
#if defined(TARGET_PPC64)
8774
GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
8775
GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
8777
GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
8778
GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
8780
GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
8781
GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
8782
GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
8784
GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
8785
GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x03FF0001, PPC_MEM_TLBIE),
8786
GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE),
8787
GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
8788
#if defined(TARGET_PPC64)
8789
GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI),
8790
GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
8792
GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
8793
GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
8794
GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
8795
GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
8796
GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
8797
GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
8798
GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
8799
GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
8800
GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
8801
GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
8802
GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
8803
GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8804
GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
8805
GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
8806
GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
8807
GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
8808
GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
8809
GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
8810
GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
8811
GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8812
GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
8813
GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
8814
GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
8815
GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
8816
GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
8817
GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
8818
GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
8819
GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
8820
GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
8821
GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
8822
GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
8823
GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
8824
GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
8825
GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
8826
GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
8827
GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
8828
GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
8829
GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
8830
GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
8831
GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
8832
GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
8833
GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
8834
GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
8835
GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
8836
GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
8837
GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
8838
GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
8839
GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
8840
GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
8841
GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8842
GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8843
GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
8844
GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
8845
GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8846
GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8847
GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
8848
GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
8849
GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
8850
GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
8851
GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
8852
GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
8853
GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
8854
GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
8855
GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
8856
GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
8857
GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
8858
GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
8859
GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
8860
GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
8861
GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
8862
GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
8863
GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
8864
GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
8865
GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
8866
GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
8867
GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
8868
GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
8869
GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
8870
GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
8871
GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
8872
GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
8873
PPC_NONE, PPC2_BOOKE206),
8874
GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
8875
PPC_NONE, PPC2_BOOKE206),
8876
GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
8877
PPC_NONE, PPC2_BOOKE206),
8878
GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
8879
PPC_NONE, PPC2_BOOKE206),
8880
GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
8881
PPC_NONE, PPC2_BOOKE206),
8882
GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001,
8883
PPC_NONE, PPC2_PRCNTL),
8884
GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001,
8885
PPC_NONE, PPC2_PRCNTL),
8886
GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
8887
GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
8888
GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
8889
GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
8890
PPC_BOOKE, PPC2_BOOKE206),
8891
GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE),
8892
GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
8893
PPC_BOOKE, PPC2_BOOKE206),
8894
GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
8895
GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
8896
GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
8897
GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
8898
GEN_HANDLER(vsldoi, 0x04, 0x16, 0xFF, 0x00000400, PPC_ALTIVEC),
8899
GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
8900
GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE),
8901
GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE),
8902
GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE),
8903
GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE),
8905
#undef GEN_INT_ARITH_ADD
8906
#undef GEN_INT_ARITH_ADD_CONST
8907
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
8908
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
8909
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
8910
add_ca, compute_ca, compute_ov) \
8911
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
8912
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
8913
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
8914
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
8915
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
8916
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
8917
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
8918
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
8919
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
8920
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
8921
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
8923
#undef GEN_INT_ARITH_DIVW
8924
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
8925
GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
8926
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
8927
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
8928
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
8929
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
8931
#if defined(TARGET_PPC64)
8932
#undef GEN_INT_ARITH_DIVD
8933
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
8934
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8935
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
8936
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
8937
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
8938
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
8940
#undef GEN_INT_ARITH_MUL_HELPER
8941
#define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
8942
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8943
GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
8944
GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
8945
GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
8948
#undef GEN_INT_ARITH_SUBF
8949
#undef GEN_INT_ARITH_SUBF_CONST
8950
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
8951
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
8952
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
8953
add_ca, compute_ca, compute_ov) \
8954
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
8955
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
8956
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
8957
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
8958
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
8959
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
8960
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
8961
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
8962
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
8963
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
8964
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
8968
#define GEN_LOGICAL2(name, tcg_op, opc, type) \
8969
GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
8970
#define GEN_LOGICAL1(name, tcg_op, opc, type) \
8971
GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
8972
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
8973
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
8974
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
8975
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
8976
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
8977
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
8978
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
8979
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
8980
#if defined(TARGET_PPC64)
8981
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
8984
#if defined(TARGET_PPC64)
8987
#define GEN_PPC64_R2(name, opc1, opc2) \
8988
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8989
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8991
#define GEN_PPC64_R4(name, opc1, opc2) \
8992
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8993
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
8995
GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8997
GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
8999
GEN_PPC64_R4(rldicl, 0x1E, 0x00),
9000
GEN_PPC64_R4(rldicr, 0x1E, 0x02),
9001
GEN_PPC64_R4(rldic, 0x1E, 0x04),
9002
GEN_PPC64_R2(rldcl, 0x1E, 0x08),
9003
GEN_PPC64_R2(rldcr, 0x1E, 0x09),
9004
GEN_PPC64_R4(rldimi, 0x1E, 0x06),
9007
#undef _GEN_FLOAT_ACB
9008
#undef GEN_FLOAT_ACB
9009
#undef _GEN_FLOAT_AB
9011
#undef _GEN_FLOAT_AC
9015
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
9016
GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type)
9017
#define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
9018
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type), \
9019
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type)
9020
#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
9021
GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
9022
#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
9023
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
9024
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
9025
#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
9026
GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
9027
#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
9028
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
9029
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
9030
#define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
9031
GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type)
9032
#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
9033
GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type)
9035
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT),
9036
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT),
9037
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT),
9038
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT),
9039
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES),
9040
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE),
9041
_GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL),
9042
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT),
9043
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT),
9044
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT),
9045
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT),
9046
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT),
9047
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT),
9048
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT),
9049
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT),
9050
#if defined(TARGET_PPC64)
9051
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B),
9052
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B),
9053
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B),
9055
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT),
9056
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT),
9057
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT),
9058
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT),
9065
#define GEN_LD(name, ldop, opc, type) \
9066
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
9067
#define GEN_LDU(name, ldop, opc, type) \
9068
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
9069
#define GEN_LDUX(name, ldop, opc2, opc3, type) \
9070
GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
9071
#define GEN_LDX_E(name, ldop, opc2, opc3, type, type2) \
9072
GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
9073
#define GEN_LDS(name, ldop, op, type) \
9074
GEN_LD(name, ldop, op | 0x20, type) \
9075
GEN_LDU(name, ldop, op | 0x21, type) \
9076
GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
9077
GEN_LDX(name, ldop, 0x17, op | 0x00, type)
9079
GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
9080
GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
9081
GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
9082
GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
9083
#if defined(TARGET_PPC64)
9084
GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
9085
GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
9086
GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B)
9087
GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B)
9088
GEN_LDX_E(ldbr, ld64ur, 0x14, 0x10, PPC_NONE, PPC2_DBRX)
9090
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
9091
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
9098
#define GEN_ST(name, stop, opc, type) \
9099
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
9100
#define GEN_STU(name, stop, opc, type) \
9101
GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
9102
#define GEN_STUX(name, stop, opc2, opc3, type) \
9103
GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
9104
#define GEN_STX_E(name, stop, opc2, opc3, type, type2) \
9105
GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
9106
#define GEN_STS(name, stop, op, type) \
9107
GEN_ST(name, stop, op | 0x20, type) \
9108
GEN_STU(name, stop, op | 0x21, type) \
9109
GEN_STUX(name, stop, 0x17, op | 0x01, type) \
9110
GEN_STX(name, stop, 0x17, op | 0x00, type)
9112
GEN_STS(stb, st8, 0x06, PPC_INTEGER)
9113
GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
9114
GEN_STS(stw, st32, 0x04, PPC_INTEGER)
9115
#if defined(TARGET_PPC64)
9116
GEN_STUX(std, st64, 0x15, 0x05, PPC_64B)
9117
GEN_STX(std, st64, 0x15, 0x04, PPC_64B)
9118
GEN_STX_E(stdbr, st64r, 0x14, 0x14, PPC_NONE, PPC2_DBRX)
9120
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
9121
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
9128
#define GEN_LDF(name, ldop, opc, type) \
9129
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
9130
#define GEN_LDUF(name, ldop, opc, type) \
9131
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
9132
#define GEN_LDUXF(name, ldop, opc, type) \
9133
GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
9134
#define GEN_LDXF(name, ldop, opc2, opc3, type) \
9135
GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
9136
#define GEN_LDFS(name, ldop, op, type) \
9137
GEN_LDF(name, ldop, op | 0x20, type) \
9138
GEN_LDUF(name, ldop, op | 0x21, type) \
9139
GEN_LDUXF(name, ldop, op | 0x01, type) \
9140
GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
9142
GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT)
9143
GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT)
9144
GEN_HANDLER_E(lfiwax, 0x1f, 0x17, 0x1a, 0x00000001, PPC_NONE, PPC2_ISA205),
9145
GEN_HANDLER_E(lfdp, 0x39, 0xFF, 0xFF, 0x00200003, PPC_NONE, PPC2_ISA205),
9146
GEN_HANDLER_E(lfdpx, 0x1F, 0x17, 0x18, 0x00200001, PPC_NONE, PPC2_ISA205),
9153
#define GEN_STF(name, stop, opc, type) \
9154
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
9155
#define GEN_STUF(name, stop, opc, type) \
9156
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
9157
#define GEN_STUXF(name, stop, opc, type) \
9158
GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
9159
#define GEN_STXF(name, stop, opc2, opc3, type) \
9160
GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
9161
#define GEN_STFS(name, stop, op, type) \
9162
GEN_STF(name, stop, op | 0x20, type) \
9163
GEN_STUF(name, stop, op | 0x21, type) \
9164
GEN_STUXF(name, stop, op | 0x01, type) \
9165
GEN_STXF(name, stop, 0x17, op | 0x00, type)
9167
GEN_STFS(stfd, st64, 0x16, PPC_FLOAT)
9168
GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT)
9169
GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX)
9170
GEN_HANDLER_E(stfdp, 0x3D, 0xFF, 0xFF, 0x00200003, PPC_NONE, PPC2_ISA205),
9171
GEN_HANDLER_E(stfdpx, 0x1F, 0x17, 0x1C, 0x00200001, PPC_NONE, PPC2_ISA205),
9174
#define GEN_CRLOGIC(name, tcg_op, opc) \
9175
GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
9176
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
9177
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
9178
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
9179
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
9180
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
9181
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
9182
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
9183
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
9185
#undef GEN_MAC_HANDLER
9186
#define GEN_MAC_HANDLER(name, opc2, opc3) \
9187
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
9188
GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
9189
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
9190
GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
9191
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
9192
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
9193
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
9194
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
9195
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
9196
GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
9197
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
9198
GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
9199
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
9200
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
9201
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
9202
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
9203
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
9204
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
9205
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
9206
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
9207
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
9208
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
9209
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
9210
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
9211
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
9212
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
9213
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
9214
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
9215
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
9216
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
9217
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
9218
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
9219
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
9220
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
9221
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
9222
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
9223
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
9224
GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
9225
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
9226
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
9227
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
9228
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
9229
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
9235
#define GEN_VR_LDX(name, opc2, opc3) \
9236
GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
9237
#define GEN_VR_STX(name, opc2, opc3) \
9238
GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
9239
#define GEN_VR_LVE(name, opc2, opc3) \
9240
GEN_HANDLER(lve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
9241
#define GEN_VR_STVE(name, opc2, opc3) \
9242
GEN_HANDLER(stve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
9243
GEN_VR_LDX(lvx, 0x07, 0x03),
9244
GEN_VR_LDX(lvxl, 0x07, 0x0B),
9245
GEN_VR_LVE(bx, 0x07, 0x00),
9246
GEN_VR_LVE(hx, 0x07, 0x01),
9247
GEN_VR_LVE(wx, 0x07, 0x02),
9248
GEN_VR_STX(svx, 0x07, 0x07),
9249
GEN_VR_STX(svxl, 0x07, 0x0F),
9250
GEN_VR_STVE(bx, 0x07, 0x04),
9251
GEN_VR_STVE(hx, 0x07, 0x05),
9252
GEN_VR_STVE(wx, 0x07, 0x06),
9254
#undef GEN_VX_LOGICAL
9255
#define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
9256
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9257
GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16),
9258
GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17),
9259
GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18),
9260
GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19),
9261
GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20),
9264
#define GEN_VXFORM(name, opc2, opc3) \
9265
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9266
GEN_VXFORM(vaddubm, 0, 0),
9267
GEN_VXFORM(vadduhm, 0, 1),
9268
GEN_VXFORM(vadduwm, 0, 2),
9269
GEN_VXFORM(vsububm, 0, 16),
9270
GEN_VXFORM(vsubuhm, 0, 17),
9271
GEN_VXFORM(vsubuwm, 0, 18),
9272
GEN_VXFORM(vmaxub, 1, 0),
9273
GEN_VXFORM(vmaxuh, 1, 1),
9274
GEN_VXFORM(vmaxuw, 1, 2),
9275
GEN_VXFORM(vmaxsb, 1, 4),
9276
GEN_VXFORM(vmaxsh, 1, 5),
9277
GEN_VXFORM(vmaxsw, 1, 6),
9278
GEN_VXFORM(vminub, 1, 8),
9279
GEN_VXFORM(vminuh, 1, 9),
9280
GEN_VXFORM(vminuw, 1, 10),
9281
GEN_VXFORM(vminsb, 1, 12),
9282
GEN_VXFORM(vminsh, 1, 13),
9283
GEN_VXFORM(vminsw, 1, 14),
9284
GEN_VXFORM(vavgub, 1, 16),
9285
GEN_VXFORM(vavguh, 1, 17),
9286
GEN_VXFORM(vavguw, 1, 18),
9287
GEN_VXFORM(vavgsb, 1, 20),
9288
GEN_VXFORM(vavgsh, 1, 21),
9289
GEN_VXFORM(vavgsw, 1, 22),
9290
GEN_VXFORM(vmrghb, 6, 0),
9291
GEN_VXFORM(vmrghh, 6, 1),
9292
GEN_VXFORM(vmrghw, 6, 2),
9293
GEN_VXFORM(vmrglb, 6, 4),
9294
GEN_VXFORM(vmrglh, 6, 5),
9295
GEN_VXFORM(vmrglw, 6, 6),
9296
GEN_VXFORM(vmuloub, 4, 0),
9297
GEN_VXFORM(vmulouh, 4, 1),
9298
GEN_VXFORM(vmulosb, 4, 4),
9299
GEN_VXFORM(vmulosh, 4, 5),
9300
GEN_VXFORM(vmuleub, 4, 8),
9301
GEN_VXFORM(vmuleuh, 4, 9),
9302
GEN_VXFORM(vmulesb, 4, 12),
9303
GEN_VXFORM(vmulesh, 4, 13),
9304
GEN_VXFORM(vslb, 2, 4),
9305
GEN_VXFORM(vslh, 2, 5),
9306
GEN_VXFORM(vslw, 2, 6),
9307
GEN_VXFORM(vsrb, 2, 8),
9308
GEN_VXFORM(vsrh, 2, 9),
9309
GEN_VXFORM(vsrw, 2, 10),
9310
GEN_VXFORM(vsrab, 2, 12),
9311
GEN_VXFORM(vsrah, 2, 13),
9312
GEN_VXFORM(vsraw, 2, 14),
9313
GEN_VXFORM(vslo, 6, 16),
9314
GEN_VXFORM(vsro, 6, 17),
9315
GEN_VXFORM(vaddcuw, 0, 6),
9316
GEN_VXFORM(vsubcuw, 0, 22),
9317
GEN_VXFORM(vaddubs, 0, 8),
9318
GEN_VXFORM(vadduhs, 0, 9),
9319
GEN_VXFORM(vadduws, 0, 10),
9320
GEN_VXFORM(vaddsbs, 0, 12),
9321
GEN_VXFORM(vaddshs, 0, 13),
9322
GEN_VXFORM(vaddsws, 0, 14),
9323
GEN_VXFORM(vsububs, 0, 24),
9324
GEN_VXFORM(vsubuhs, 0, 25),
9325
GEN_VXFORM(vsubuws, 0, 26),
9326
GEN_VXFORM(vsubsbs, 0, 28),
9327
GEN_VXFORM(vsubshs, 0, 29),
9328
GEN_VXFORM(vsubsws, 0, 30),
9329
GEN_VXFORM(vrlb, 2, 0),
9330
GEN_VXFORM(vrlh, 2, 1),
9331
GEN_VXFORM(vrlw, 2, 2),
9332
GEN_VXFORM(vsl, 2, 7),
9333
GEN_VXFORM(vsr, 2, 11),
9334
GEN_VXFORM(vpkuhum, 7, 0),
9335
GEN_VXFORM(vpkuwum, 7, 1),
9336
GEN_VXFORM(vpkuhus, 7, 2),
9337
GEN_VXFORM(vpkuwus, 7, 3),
9338
GEN_VXFORM(vpkshus, 7, 4),
9339
GEN_VXFORM(vpkswus, 7, 5),
9340
GEN_VXFORM(vpkshss, 7, 6),
9341
GEN_VXFORM(vpkswss, 7, 7),
9342
GEN_VXFORM(vpkpx, 7, 12),
9343
GEN_VXFORM(vsum4ubs, 4, 24),
9344
GEN_VXFORM(vsum4sbs, 4, 28),
9345
GEN_VXFORM(vsum4shs, 4, 25),
9346
GEN_VXFORM(vsum2sws, 4, 26),
9347
GEN_VXFORM(vsumsws, 4, 30),
9348
GEN_VXFORM(vaddfp, 5, 0),
9349
GEN_VXFORM(vsubfp, 5, 1),
9350
GEN_VXFORM(vmaxfp, 5, 16),
9351
GEN_VXFORM(vminfp, 5, 17),
9355
#define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
9356
GEN_HANDLER2(name, str, 0x4, opc2, opc3, 0x00000000, PPC_ALTIVEC),
9357
#define GEN_VXRFORM(name, opc2, opc3) \
9358
GEN_VXRFORM1(name, name, #name, opc2, opc3) \
9359
GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
9360
GEN_VXRFORM(vcmpequb, 3, 0)
9361
GEN_VXRFORM(vcmpequh, 3, 1)
9362
GEN_VXRFORM(vcmpequw, 3, 2)
9363
GEN_VXRFORM(vcmpgtsb, 3, 12)
9364
GEN_VXRFORM(vcmpgtsh, 3, 13)
9365
GEN_VXRFORM(vcmpgtsw, 3, 14)
9366
GEN_VXRFORM(vcmpgtub, 3, 8)
9367
GEN_VXRFORM(vcmpgtuh, 3, 9)
9368
GEN_VXRFORM(vcmpgtuw, 3, 10)
9369
GEN_VXRFORM(vcmpeqfp, 3, 3)
9370
GEN_VXRFORM(vcmpgefp, 3, 7)
9371
GEN_VXRFORM(vcmpgtfp, 3, 11)
9372
GEN_VXRFORM(vcmpbfp, 3, 15)
9374
#undef GEN_VXFORM_SIMM
9375
#define GEN_VXFORM_SIMM(name, opc2, opc3) \
9376
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9377
GEN_VXFORM_SIMM(vspltisb, 6, 12),
9378
GEN_VXFORM_SIMM(vspltish, 6, 13),
9379
GEN_VXFORM_SIMM(vspltisw, 6, 14),
9381
#undef GEN_VXFORM_NOA
9382
#define GEN_VXFORM_NOA(name, opc2, opc3) \
9383
GEN_HANDLER(name, 0x04, opc2, opc3, 0x001f0000, PPC_ALTIVEC)
9384
GEN_VXFORM_NOA(vupkhsb, 7, 8),
9385
GEN_VXFORM_NOA(vupkhsh, 7, 9),
9386
GEN_VXFORM_NOA(vupklsb, 7, 10),
9387
GEN_VXFORM_NOA(vupklsh, 7, 11),
9388
GEN_VXFORM_NOA(vupkhpx, 7, 13),
9389
GEN_VXFORM_NOA(vupklpx, 7, 15),
9390
GEN_VXFORM_NOA(vrefp, 5, 4),
9391
GEN_VXFORM_NOA(vrsqrtefp, 5, 5),
9392
GEN_VXFORM_NOA(vexptefp, 5, 6),
9393
GEN_VXFORM_NOA(vlogefp, 5, 7),
9394
GEN_VXFORM_NOA(vrfim, 5, 8),
9395
GEN_VXFORM_NOA(vrfin, 5, 9),
9396
GEN_VXFORM_NOA(vrfip, 5, 10),
9397
GEN_VXFORM_NOA(vrfiz, 5, 11),
9399
#undef GEN_VXFORM_UIMM
9400
#define GEN_VXFORM_UIMM(name, opc2, opc3) \
9401
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9402
GEN_VXFORM_UIMM(vspltb, 6, 8),
9403
GEN_VXFORM_UIMM(vsplth, 6, 9),
9404
GEN_VXFORM_UIMM(vspltw, 6, 10),
9405
GEN_VXFORM_UIMM(vcfux, 5, 12),
9406
GEN_VXFORM_UIMM(vcfsx, 5, 13),
9407
GEN_VXFORM_UIMM(vctuxs, 5, 14),
9408
GEN_VXFORM_UIMM(vctsxs, 5, 15),
9410
#undef GEN_VAFORM_PAIRED
9411
#define GEN_VAFORM_PAIRED(name0, name1, opc2) \
9412
GEN_HANDLER(name0##_##name1, 0x04, opc2, 0xFF, 0x00000000, PPC_ALTIVEC)
9413
GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16),
9414
GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18),
9415
GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19),
9416
GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20),
9417
GEN_VAFORM_PAIRED(vsel, vperm, 21),
9418
GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23),
9421
#define GEN_SPE(name0, name1, opc2, opc3, inval0, inval1, type) \
9422
GEN_OPCODE_DUAL(name0##_##name1, 0x04, opc2, opc3, inval0, inval1, type, PPC_NONE)
9423
GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9424
GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9425
GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9426
GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9427
GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
9428
GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
9429
GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
9430
GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x0000F800, 0x00000000, PPC_SPE),
9431
GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, 0xFFFFFFFF, PPC_SPE),
9432
GEN_SPE(speundef, evand, 0x08, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE),
9433
GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9434
GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9435
GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9436
GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, 0x00000000, PPC_SPE),
9437
GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, 0x00000000, PPC_SPE),
9438
GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, 0x00000000, PPC_SPE),
9439
GEN_SPE(speundef, evorc, 0x0D, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE),
9440
GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9441
GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9442
GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9443
GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9444
GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9445
GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, 0x0000F800, PPC_SPE),
9446
GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, 0x0000F800, PPC_SPE),
9447
GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9448
GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9449
GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, 0x00600000, PPC_SPE),
9450
GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, 0x00600000, PPC_SPE),
9451
GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, 0xFFFFFFFF, PPC_SPE),
9453
GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
9454
GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE),
9455
GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE),
9456
GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
9457
GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
9458
GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9459
GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9460
GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9461
GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9462
GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9463
GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9464
GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9465
GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
9466
GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9468
GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
9469
GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE),
9470
GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE),
9471
GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
9472
GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
9473
GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, 0x00180000, PPC_SPE_SINGLE),
9474
GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9475
GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9476
GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9477
GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9478
GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9479
GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9480
GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
9481
GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9483
GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE),
9484
GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9485
GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_DOUBLE),
9486
GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_DOUBLE),
9487
GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE),
9488
GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9489
GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE),
9490
GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, 0x00180000, PPC_SPE_DOUBLE),
9491
GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9492
GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9493
GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9494
GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9495
GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
9496
GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
9497
GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE),
9498
GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
9500
#undef GEN_SPEOP_LDST
9501
#define GEN_SPEOP_LDST(name, opc2, sh) \
9502
GEN_HANDLER(name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE)
9503
GEN_SPEOP_LDST(evldd, 0x00, 3),
9504
GEN_SPEOP_LDST(evldw, 0x01, 3),
9505
GEN_SPEOP_LDST(evldh, 0x02, 3),
9506
GEN_SPEOP_LDST(evlhhesplat, 0x04, 1),
9507
GEN_SPEOP_LDST(evlhhousplat, 0x06, 1),
9508
GEN_SPEOP_LDST(evlhhossplat, 0x07, 1),
9509
GEN_SPEOP_LDST(evlwhe, 0x08, 2),
9510
GEN_SPEOP_LDST(evlwhou, 0x0A, 2),
9511
GEN_SPEOP_LDST(evlwhos, 0x0B, 2),
9512
GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2),
9513
GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2),
9515
GEN_SPEOP_LDST(evstdd, 0x10, 3),
9516
GEN_SPEOP_LDST(evstdw, 0x11, 3),
9517
GEN_SPEOP_LDST(evstdh, 0x12, 3),
9518
GEN_SPEOP_LDST(evstwhe, 0x18, 2),
9519
GEN_SPEOP_LDST(evstwho, 0x1A, 2),
9520
GEN_SPEOP_LDST(evstwwe, 0x1C, 2),
9521
GEN_SPEOP_LDST(evstwwo, 0x1E, 2),
9524
#include "helper_regs.h"
9525
#include "translate_init.c"
9527
/*****************************************************************************/
9528
/* Misc PowerPC helpers */
9529
void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
9535
PowerPCCPU *cpu = POWERPC_CPU(cs);
9536
CPUPPCState *env = &cpu->env;
9539
cpu_synchronize_state(cs);
9541
cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
9542
TARGET_FMT_lx " XER " TARGET_FMT_lx "\n",
9543
env->nip, env->lr, env->ctr, cpu_read_xer(env));
9544
cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
9545
TARGET_FMT_lx " idx %d\n", env->msr, env->spr[SPR_HID0],
9546
env->hflags, env->mmu_idx);
9547
#if !defined(NO_TIMER_DUMP)
9548
cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
9549
#if !defined(CONFIG_USER_ONLY)
9553
cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
9554
#if !defined(CONFIG_USER_ONLY)
9555
, cpu_ppc_load_decr(env)
9559
for (i = 0; i < 32; i++) {
9560
if ((i & (RGPL - 1)) == 0)
9561
cpu_fprintf(f, "GPR%02d", i);
9562
cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
9563
if ((i & (RGPL - 1)) == (RGPL - 1))
9564
cpu_fprintf(f, "\n");
9566
cpu_fprintf(f, "CR ");
9567
for (i = 0; i < 8; i++)
9568
cpu_fprintf(f, "%01x", env->crf[i]);
9569
cpu_fprintf(f, " [");
9570
for (i = 0; i < 8; i++) {
9572
if (env->crf[i] & 0x08)
9574
else if (env->crf[i] & 0x04)
9576
else if (env->crf[i] & 0x02)
9578
cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
9580
cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
9582
for (i = 0; i < 32; i++) {
9583
if ((i & (RFPL - 1)) == 0)
9584
cpu_fprintf(f, "FPR%02d", i);
9585
cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
9586
if ((i & (RFPL - 1)) == (RFPL - 1))
9587
cpu_fprintf(f, "\n");
9589
cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr);
9590
#if !defined(CONFIG_USER_ONLY)
9591
cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx
9592
" PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n",
9593
env->spr[SPR_SRR0], env->spr[SPR_SRR1],
9594
env->spr[SPR_PVR], env->spr[SPR_VRSAVE]);
9596
cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx
9597
" SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n",
9598
env->spr[SPR_SPRG0], env->spr[SPR_SPRG1],
9599
env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]);
9601
cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx
9602
" SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n",
9603
env->spr[SPR_SPRG4], env->spr[SPR_SPRG5],
9604
env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]);
9606
if (env->excp_model == POWERPC_EXCP_BOOKE) {
9607
cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx
9608
" MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n",
9609
env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1],
9610
env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]);
9612
cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx
9613
" ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n",
9614
env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR],
9615
env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]);
9617
cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx
9618
" IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n",
9619
env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR],
9620
env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]);
9622
cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx
9623
" EPR " TARGET_FMT_lx "\n",
9624
env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8],
9625
env->spr[SPR_BOOKE_EPR]);
9628
cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx
9629
" PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n",
9630
env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1],
9631
env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]);
9634
* IVORs are left out as they are large and do not change often --
9635
* they can be read with "p $ivor0", "p $ivor1", etc.
9639
#if defined(TARGET_PPC64)
9640
if (env->flags & POWERPC_FLAG_CFAR) {
9641
cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar);
9645
switch (env->mmu_model) {
9646
case POWERPC_MMU_32B:
9647
case POWERPC_MMU_601:
9648
case POWERPC_MMU_SOFT_6xx:
9649
case POWERPC_MMU_SOFT_74xx:
9650
#if defined(TARGET_PPC64)
9651
case POWERPC_MMU_64B:
9653
cpu_fprintf(f, " SDR1 " TARGET_FMT_lx "\n", env->spr[SPR_SDR1]);
9655
case POWERPC_MMU_BOOKE206:
9656
cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx
9657
" MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n",
9658
env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1],
9659
env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]);
9661
cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx
9662
" MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n",
9663
env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6],
9664
env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]);
9666
cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx
9667
" TLB1CFG " TARGET_FMT_lx "\n",
9668
env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG],
9669
env->spr[SPR_BOOKE_TLB1CFG]);
9680
void ppc_cpu_dump_statistics(CPUState *cs, FILE*f,
9681
fprintf_function cpu_fprintf, int flags)
9683
#if defined(DO_PPC_STATISTICS)
9684
PowerPCCPU *cpu = POWERPC_CPU(cs);
9685
opc_handler_t **t1, **t2, **t3, *handler;
9688
t1 = cpu->env.opcodes;
9689
for (op1 = 0; op1 < 64; op1++) {
9691
if (is_indirect_opcode(handler)) {
9692
t2 = ind_table(handler);
9693
for (op2 = 0; op2 < 32; op2++) {
9695
if (is_indirect_opcode(handler)) {
9696
t3 = ind_table(handler);
9697
for (op3 = 0; op3 < 32; op3++) {
9699
if (handler->count == 0)
9701
cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
9702
"%016" PRIx64 " %" PRId64 "\n",
9703
op1, op2, op3, op1, (op3 << 5) | op2,
9705
handler->count, handler->count);
9708
if (handler->count == 0)
9710
cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
9711
"%016" PRIx64 " %" PRId64 "\n",
9712
op1, op2, op1, op2, handler->oname,
9713
handler->count, handler->count);
9717
if (handler->count == 0)
9719
cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
9721
op1, op1, handler->oname,
9722
handler->count, handler->count);
9728
/*****************************************************************************/
9729
static inline void gen_intermediate_code_internal(PowerPCCPU *cpu,
9730
TranslationBlock *tb,
9733
CPUState *cs = CPU(cpu);
9734
CPUPPCState *env = &cpu->env;
9735
DisasContext ctx, *ctxp = &ctx;
9736
opc_handler_t **table, *handler;
9737
target_ulong pc_start;
9738
uint16_t *gen_opc_end;
9745
gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
9748
ctx.exception = POWERPC_EXCP_NONE;
9749
ctx.spr_cb = env->spr_cb;
9750
ctx.mem_idx = env->mmu_idx;
9751
ctx.insns_flags = env->insns_flags;
9752
ctx.insns_flags2 = env->insns_flags2;
9753
ctx.access_type = -1;
9754
ctx.le_mode = env->hflags & (1 << MSR_LE) ? 1 : 0;
9755
#if defined(TARGET_PPC64)
9756
ctx.sf_mode = msr_is_64bit(env, env->msr);
9757
ctx.has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
9759
ctx.fpu_enabled = msr_fp;
9760
if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
9761
ctx.spe_enabled = msr_spe;
9763
ctx.spe_enabled = 0;
9764
if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
9765
ctx.altivec_enabled = msr_vr;
9767
ctx.altivec_enabled = 0;
9768
if ((env->flags & POWERPC_FLAG_SE) && msr_se)
9769
ctx.singlestep_enabled = CPU_SINGLE_STEP;
9771
ctx.singlestep_enabled = 0;
9772
if ((env->flags & POWERPC_FLAG_BE) && msr_be)
9773
ctx.singlestep_enabled |= CPU_BRANCH_STEP;
9774
if (unlikely(cs->singlestep_enabled)) {
9775
ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
9777
#if defined (DO_SINGLE_STEP) && 0
9778
/* Single step trace mode */
9782
max_insns = tb->cflags & CF_COUNT_MASK;
9784
max_insns = CF_COUNT_MASK;
9787
/* Set env in case of segfault during code fetch */
9788
while (ctx.exception == POWERPC_EXCP_NONE
9789
&& tcg_ctx.gen_opc_ptr < gen_opc_end) {
9790
if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
9791
QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
9792
if (bp->pc == ctx.nip) {
9793
gen_debug_exception(ctxp);
9798
if (unlikely(search_pc)) {
9799
j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
9803
tcg_ctx.gen_opc_instr_start[lj++] = 0;
9805
tcg_ctx.gen_opc_pc[lj] = ctx.nip;
9806
tcg_ctx.gen_opc_instr_start[lj] = 1;
9807
tcg_ctx.gen_opc_icount[lj] = num_insns;
9809
LOG_DISAS("----------------\n");
9810
LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
9811
ctx.nip, ctx.mem_idx, (int)msr_ir);
9812
if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
9814
if (unlikely(ctx.le_mode)) {
9815
ctx.opcode = bswap32(cpu_ldl_code(env, ctx.nip));
9817
ctx.opcode = cpu_ldl_code(env, ctx.nip);
9819
LOG_DISAS("translate opcode %08x (%02x %02x %02x) (%s)\n",
9820
ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
9821
opc3(ctx.opcode), ctx.le_mode ? "little" : "big");
9822
if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
9823
tcg_gen_debug_insn_start(ctx.nip);
9826
table = env->opcodes;
9828
handler = table[opc1(ctx.opcode)];
9829
if (is_indirect_opcode(handler)) {
9830
table = ind_table(handler);
9831
handler = table[opc2(ctx.opcode)];
9832
if (is_indirect_opcode(handler)) {
9833
table = ind_table(handler);
9834
handler = table[opc3(ctx.opcode)];
9837
/* Is opcode *REALLY* valid ? */
9838
if (unlikely(handler->handler == &gen_invalid)) {
9839
if (qemu_log_enabled()) {
9840
qemu_log("invalid/unsupported opcode: "
9841
"%02x - %02x - %02x (%08x) " TARGET_FMT_lx " %d\n",
9842
opc1(ctx.opcode), opc2(ctx.opcode),
9843
opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
9848
if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE) && Rc(ctx.opcode))) {
9849
inval = handler->inval2;
9851
inval = handler->inval1;
9854
if (unlikely((ctx.opcode & inval) != 0)) {
9855
if (qemu_log_enabled()) {
9856
qemu_log("invalid bits: %08x for opcode: "
9857
"%02x - %02x - %02x (%08x) " TARGET_FMT_lx "\n",
9858
ctx.opcode & inval, opc1(ctx.opcode),
9859
opc2(ctx.opcode), opc3(ctx.opcode),
9860
ctx.opcode, ctx.nip - 4);
9862
gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL);
9866
(*(handler->handler))(&ctx);
9867
#if defined(DO_PPC_STATISTICS)
9870
/* Check trace mode exceptions */
9871
if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
9872
(ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
9873
ctx.exception != POWERPC_SYSCALL &&
9874
ctx.exception != POWERPC_EXCP_TRAP &&
9875
ctx.exception != POWERPC_EXCP_BRANCH)) {
9876
gen_exception(ctxp, POWERPC_EXCP_TRACE);
9877
} else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
9878
(cs->singlestep_enabled) ||
9880
num_insns >= max_insns)) {
9881
/* if we reach a page boundary or are single stepping, stop
9887
if (tb->cflags & CF_LAST_IO)
9889
if (ctx.exception == POWERPC_EXCP_NONE) {
9890
gen_goto_tb(&ctx, 0, ctx.nip);
9891
} else if (ctx.exception != POWERPC_EXCP_BRANCH) {
9892
if (unlikely(cs->singlestep_enabled)) {
9893
gen_debug_exception(ctxp);
9895
/* Generate the return instruction */
9898
gen_tb_end(tb, num_insns);
9899
*tcg_ctx.gen_opc_ptr = INDEX_op_end;
9900
if (unlikely(search_pc)) {
9901
j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
9904
tcg_ctx.gen_opc_instr_start[lj++] = 0;
9906
tb->size = ctx.nip - pc_start;
9907
tb->icount = num_insns;
9909
#if defined(DEBUG_DISAS)
9910
if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
9912
flags = env->bfd_mach;
9913
flags |= ctx.le_mode << 16;
9914
qemu_log("IN: %s\n", lookup_symbol(pc_start));
9915
log_target_disas(env, pc_start, ctx.nip - pc_start, flags);
9921
void gen_intermediate_code (CPUPPCState *env, struct TranslationBlock *tb)
9923
gen_intermediate_code_internal(ppc_env_get_cpu(env), tb, false);
9926
void gen_intermediate_code_pc (CPUPPCState *env, struct TranslationBlock *tb)
9928
gen_intermediate_code_internal(ppc_env_get_cpu(env), tb, true);
9931
void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb, int pc_pos)
9933
env->nip = tcg_ctx.gen_opc_pc[pc_pos];