/*
 * PowerPC emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *  Copyright (C) 2011 Freescale Semiconductor, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
29
#include "qemu-common.h"
#include "host-utils.h"
36
/* Single-stepping mode flags, combined in ctx->singlestep_enabled. */
#define CPU_SINGLE_STEP 0x1      /* architectural single step (MSR[SE]) */
#define CPU_BRANCH_STEP 0x2      /* branch trace step (MSR[BE]) */
#define GDBSTUB_SINGLE_STEP 0x4  /* single step requested by the gdb stub */
40
/* Include definitions for instructions classes and implementations flags */
//#define PPC_DEBUG_DISAS
//#define DO_PPC_STATISTICS

/*
 * LOG_DISAS() logs to the in-asm translation log only when PPC_DEBUG_DISAS
 * is enabled; otherwise it compiles to a no-op statement.
 */
#ifdef PPC_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
49
/*****************************************************************************/
50
/* Code translation helpers */
52
/* global register indexes */
53
static TCGv_ptr cpu_env;
54
static char cpu_reg_names[10*3 + 22*4 /* GPR */
55
#if !defined(TARGET_PPC64)
56
+ 10*4 + 22*5 /* SPE GPRh */
58
+ 10*4 + 22*5 /* FPR */
59
+ 2*(10*6 + 22*7) /* AVRh, AVRl */
61
static TCGv cpu_gpr[32];
62
#if !defined(TARGET_PPC64)
63
static TCGv cpu_gprh[32];
65
static TCGv_i64 cpu_fpr[32];
66
static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
67
static TCGv_i32 cpu_crf[8];
73
static TCGv cpu_reserve;
74
static TCGv_i32 cpu_fpscr;
75
static TCGv_i32 cpu_access_type;
77
#include "gen-icount.h"
79
void ppc_translate_init(void)
83
size_t cpu_reg_names_size;
84
static int done_init = 0;
89
cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
92
cpu_reg_names_size = sizeof(cpu_reg_names);
94
for (i = 0; i < 8; i++) {
95
snprintf(p, cpu_reg_names_size, "crf%d", i);
96
cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
97
offsetof(CPUState, crf[i]), p);
99
cpu_reg_names_size -= 5;
102
for (i = 0; i < 32; i++) {
103
snprintf(p, cpu_reg_names_size, "r%d", i);
104
cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
105
offsetof(CPUState, gpr[i]), p);
106
p += (i < 10) ? 3 : 4;
107
cpu_reg_names_size -= (i < 10) ? 3 : 4;
108
#if !defined(TARGET_PPC64)
109
snprintf(p, cpu_reg_names_size, "r%dH", i);
110
cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
111
offsetof(CPUState, gprh[i]), p);
112
p += (i < 10) ? 4 : 5;
113
cpu_reg_names_size -= (i < 10) ? 4 : 5;
116
snprintf(p, cpu_reg_names_size, "fp%d", i);
117
cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
118
offsetof(CPUState, fpr[i]), p);
119
p += (i < 10) ? 4 : 5;
120
cpu_reg_names_size -= (i < 10) ? 4 : 5;
122
snprintf(p, cpu_reg_names_size, "avr%dH", i);
123
#ifdef HOST_WORDS_BIGENDIAN
124
cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
125
offsetof(CPUState, avr[i].u64[0]), p);
127
cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
128
offsetof(CPUState, avr[i].u64[1]), p);
130
p += (i < 10) ? 6 : 7;
131
cpu_reg_names_size -= (i < 10) ? 6 : 7;
133
snprintf(p, cpu_reg_names_size, "avr%dL", i);
134
#ifdef HOST_WORDS_BIGENDIAN
135
cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
136
offsetof(CPUState, avr[i].u64[1]), p);
138
cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
139
offsetof(CPUState, avr[i].u64[0]), p);
141
p += (i < 10) ? 6 : 7;
142
cpu_reg_names_size -= (i < 10) ? 6 : 7;
145
cpu_nip = tcg_global_mem_new(TCG_AREG0,
146
offsetof(CPUState, nip), "nip");
148
cpu_msr = tcg_global_mem_new(TCG_AREG0,
149
offsetof(CPUState, msr), "msr");
151
cpu_ctr = tcg_global_mem_new(TCG_AREG0,
152
offsetof(CPUState, ctr), "ctr");
154
cpu_lr = tcg_global_mem_new(TCG_AREG0,
155
offsetof(CPUState, lr), "lr");
157
cpu_xer = tcg_global_mem_new(TCG_AREG0,
158
offsetof(CPUState, xer), "xer");
160
cpu_reserve = tcg_global_mem_new(TCG_AREG0,
161
offsetof(CPUState, reserve_addr),
164
cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
165
offsetof(CPUState, fpscr), "fpscr");
167
cpu_access_type = tcg_global_mem_new_i32(TCG_AREG0,
168
offsetof(CPUState, access_type), "access_type");
170
/* register helpers */
177
/* internal defines */
178
typedef struct DisasContext {
179
struct TranslationBlock *tb;
183
/* Routine used to access memory */
186
/* Translation flags */
188
#if defined(TARGET_PPC64)
194
ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
195
int singlestep_enabled;
198
struct opc_handler_t {
201
/* instruction type */
203
/* extended instruction type */
206
void (*handler)(DisasContext *ctx);
207
#if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
210
#if defined(DO_PPC_STATISTICS)
215
/* Emit a call to the helper that clears the sticky FP status state. */
static inline void gen_reset_fpstatus(void)
{
    gen_helper_reset_fpstatus();
}
220
static inline void gen_compute_fprf(TCGv_i64 arg, int set_fprf, int set_rc)
222
TCGv_i32 t0 = tcg_temp_new_i32();
225
/* This case might be optimized later */
226
tcg_gen_movi_i32(t0, 1);
227
gen_helper_compute_fprf(t0, arg, t0);
228
if (unlikely(set_rc)) {
229
tcg_gen_mov_i32(cpu_crf[1], t0);
231
gen_helper_float_check_status();
232
} else if (unlikely(set_rc)) {
233
/* We always need to compute fpcc */
234
tcg_gen_movi_i32(t0, 0);
235
gen_helper_compute_fprf(t0, arg, t0);
236
tcg_gen_mov_i32(cpu_crf[1], t0);
239
tcg_temp_free_i32(t0);
242
/*
 * Record the current memory access type in the CPU state, emitting the
 * store only when it differs from the value already tracked in ctx
 * (avoids redundant TCG stores within a translation block).
 */
static inline void gen_set_access_type(DisasContext *ctx, int access_type)
{
    if (ctx->access_type != access_type) {
        tcg_gen_movi_i32(cpu_access_type, access_type);
        ctx->access_type = access_type;
    }
}
250
/*
 * Flush the next-instruction pointer to the CPU state.  In 32-bit mode
 * the address is truncated to 32 bits.
 */
static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
{
#if defined(TARGET_PPC64)
    if (ctx->sf_mode)
        tcg_gen_movi_tl(cpu_nip, nip);
    else
#endif
        tcg_gen_movi_tl(cpu_nip, (uint32_t)nip);
}
260
/*
 * Raise exception 'excp' with error code 'error'.  The current nip is
 * flushed to the CPU state first if no exception is already pending, so
 * the exception handler sees the faulting instruction's address.
 */
static inline void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
{
    TCGv_i32 t0, t1;
    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_update_nip(ctx, ctx->nip);
    }
    t0 = tcg_const_i32(excp);
    t1 = tcg_const_i32(error);
    gen_helper_raise_exception_err(t0, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    ctx->exception = (excp);
}
274
static inline void gen_exception(DisasContext *ctx, uint32_t excp)
277
if (ctx->exception == POWERPC_EXCP_NONE) {
278
gen_update_nip(ctx, ctx->nip);
280
t0 = tcg_const_i32(excp);
281
gen_helper_raise_exception(t0);
282
tcg_temp_free_i32(t0);
283
ctx->exception = (excp);
286
/*
 * Raise EXCP_DEBUG (host-side debug stop).  nip is not rewritten after a
 * branch, since the branch code has already updated it.
 */
static inline void gen_debug_exception(DisasContext *ctx)
{
    TCGv_i32 t0;

    if (ctx->exception != POWERPC_EXCP_BRANCH)
        gen_update_nip(ctx, ctx->nip);
    t0 = tcg_const_i32(EXCP_DEBUG);
    gen_helper_raise_exception(t0);
    tcg_temp_free_i32(t0);
}
297
/* Raise a program interrupt flagging an invalid instruction form. */
static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_INVAL | error);
}
302
/* Stop translation */
303
static inline void gen_stop_exception(DisasContext *ctx)
305
gen_update_nip(ctx, ctx->nip);
306
ctx->exception = POWERPC_EXCP_STOP;
309
/* No need to update nip here, as execution flow will change */
310
static inline void gen_sync_exception(DisasContext *ctx)
312
ctx->exception = POWERPC_EXCP_SYNC;
315
/*
 * Convenience wrappers around GEN_OPCODE/GEN_OPCODE2: the plain forms
 * default the secondary type to PPC_NONE, the _E forms pass it through,
 * and the 2-variants take a distinct opcode-table name ('onam').
 */
#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
327
typedef struct opcode_t {
328
unsigned char opc1, opc2, opc3;
329
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
330
unsigned char pad[5];
332
unsigned char pad[1];
334
opc_handler_t handler;
338
/*****************************************************************************/
339
/*** Instruction decoding ***/
340
/*
 * Generate an inline extractor returning the 'nb'-bit field of the opcode
 * starting at bit 'shift' (zero-extended).
 */
#define EXTRACT_HELPER(name, shift, nb)                                       \
static inline uint32_t name(uint32_t opcode)                                  \
{                                                                             \
    return (opcode >> (shift)) & ((1 << (nb)) - 1);                           \
}

/* Same, but the extracted field is sign-extended from 16 bits. */
#define EXTRACT_SHELPER(name, shift, nb)                                      \
static inline int32_t name(uint32_t opcode)                                   \
{                                                                             \
    return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1));                \
}
353
/* Opcode part 1 */
EXTRACT_HELPER(opc1, 26, 6);
/* Opcode part 2 */
EXTRACT_HELPER(opc2, 1, 5);
/* Opcode part 3 */
EXTRACT_HELPER(opc3, 6, 5);
/* Update Cr0 flags */
EXTRACT_HELPER(Rc, 0, 1);
/* Destination */
EXTRACT_HELPER(rD, 21, 5);
/* Source */
EXTRACT_HELPER(rS, 21, 5);
/* First operand */
EXTRACT_HELPER(rA, 16, 5);
/* Second operand */
EXTRACT_HELPER(rB, 11, 5);
/* Third operand */
EXTRACT_HELPER(rC, 6, 5);
/* CR fields and bits */
EXTRACT_HELPER(crfD, 23, 3);
EXTRACT_HELPER(crfS, 18, 3);
EXTRACT_HELPER(crbD, 21, 5);
EXTRACT_HELPER(crbA, 16, 5);
EXTRACT_HELPER(crbB, 11, 5);
377
/* SPR id */
EXTRACT_HELPER(_SPR, 11, 10);
/* The SPR number is encoded with its two 5-bit halves swapped. */
static inline uint32_t SPR(uint32_t opcode)
{
    uint32_t sprn = _SPR(opcode);

    return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
}
384
/*** Get constants ***/
385
EXTRACT_HELPER(IMM, 12, 8);
386
/* 16 bits signed immediate value */
387
EXTRACT_SHELPER(SIMM, 0, 16);
388
/* 16 bits unsigned immediate value */
389
EXTRACT_HELPER(UIMM, 0, 16);
390
/* 5 bits signed immediate value */
391
EXTRACT_HELPER(SIMM5, 16, 5);
392
/* 5 bits signed immediate value */
393
EXTRACT_HELPER(UIMM5, 16, 5);
395
EXTRACT_HELPER(NB, 11, 5);
397
EXTRACT_HELPER(SH, 11, 5);
398
/* Vector shift count */
399
EXTRACT_HELPER(VSH, 6, 4);
401
EXTRACT_HELPER(MB, 6, 5);
403
EXTRACT_HELPER(ME, 1, 5);
405
EXTRACT_HELPER(TO, 21, 5);
407
EXTRACT_HELPER(CRM, 12, 8);
408
EXTRACT_HELPER(FM, 17, 8);
409
EXTRACT_HELPER(SR, 16, 4);
410
EXTRACT_HELPER(FPIMM, 12, 4);
412
/*** Jump target decoding ***/
414
EXTRACT_SHELPER(d, 0, 16);
415
/* Immediate address */
416
static inline target_ulong LI(uint32_t opcode)
418
return (opcode >> 0) & 0x03FFFFFC;
421
static inline uint32_t BD(uint32_t opcode)
423
return (opcode >> 0) & 0xFFFC;
426
EXTRACT_HELPER(BO, 21, 5);
427
EXTRACT_HELPER(BI, 16, 5);
428
/* Absolute/relative address */
429
EXTRACT_HELPER(AA, 1, 1);
431
EXTRACT_HELPER(LK, 0, 1);
433
/* Create a mask between <start> and <end> bits */
434
static inline target_ulong MASK(uint32_t start, uint32_t end)
438
#if defined(TARGET_PPC64)
439
if (likely(start == 0)) {
440
ret = UINT64_MAX << (63 - end);
441
} else if (likely(end == 63)) {
442
ret = UINT64_MAX >> start;
445
if (likely(start == 0)) {
446
ret = UINT32_MAX << (31 - end);
447
} else if (likely(end == 31)) {
448
ret = UINT32_MAX >> start;
452
ret = (((target_ulong)(-1ULL)) >> (start)) ^
453
(((target_ulong)(-1ULL) >> (end)) >> 1);
454
if (unlikely(start > end))
461
/*****************************************************************************/
462
/* PowerPC instructions table */
464
#if defined(DO_PPC_STATISTICS)
465
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
475
.handler = &gen_##name, \
476
.oname = stringify(name), \
478
.oname = stringify(name), \
480
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
490
.handler = &gen_##name, \
496
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
506
.handler = &gen_##name, \
508
.oname = stringify(name), \
510
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
520
.handler = &gen_##name, \
526
/* SPR load/store helpers */
527
static inline void gen_load_spr(TCGv t, int reg)
529
tcg_gen_ld_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
532
/* Store TCG value 't' into SPR 'reg' in the CPU state. */
static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
}
537
/* Invalid instruction */
538
static void gen_invalid(DisasContext *ctx)
540
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
543
static opc_handler_t invalid_handler = {
547
.handler = gen_invalid,
550
/*** Integer comparison ***/
552
static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
556
tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_xer);
557
tcg_gen_shri_i32(cpu_crf[crf], cpu_crf[crf], XER_SO);
558
tcg_gen_andi_i32(cpu_crf[crf], cpu_crf[crf], 1);
560
l1 = gen_new_label();
561
l2 = gen_new_label();
562
l3 = gen_new_label();
564
tcg_gen_brcond_tl(TCG_COND_LT, arg0, arg1, l1);
565
tcg_gen_brcond_tl(TCG_COND_GT, arg0, arg1, l2);
567
tcg_gen_brcond_tl(TCG_COND_LTU, arg0, arg1, l1);
568
tcg_gen_brcond_tl(TCG_COND_GTU, arg0, arg1, l2);
570
tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_EQ);
573
tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_LT);
576
tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_GT);
580
/* Compare against an immediate; wraps gen_op_cmp(). */
static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_local_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}
587
#if defined(TARGET_PPC64)
/* 32-bit-mode compare: operands are sign/zero extended from 32 bits. */
static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

/* 32-bit-mode compare against an immediate. */
static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_local_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}
#endif
613
/* Set CR0 from a signed compare of 'reg' against zero (Rc=1 forms). */
static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode))
        gen_op_cmpi32(reg, 0, 1, 0);
    else
#endif
        gen_op_cmpi(reg, 0, 1, 0);
}
624
/* cmp */
static void gen_cmp(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     1, crfD(ctx->opcode));
    else
#endif
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   1, crfD(ctx->opcode));
}
637
/* cmpi */
static void gen_cmpi(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                      1, crfD(ctx->opcode));
    else
#endif
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                    1, crfD(ctx->opcode));
}
650
/* cmpl */
static void gen_cmpl(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     0, crfD(ctx->opcode));
    else
#endif
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   0, crfD(ctx->opcode));
}
663
/* cmpli */
static void gen_cmpli(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                      0, crfD(ctx->opcode));
    else
#endif
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                    0, crfD(ctx->opcode));
}
675
/* isel (PowerPC 2.03 specification) */
676
static void gen_isel(DisasContext *ctx)
679
uint32_t bi = rC(ctx->opcode);
683
l1 = gen_new_label();
684
l2 = gen_new_label();
686
mask = 1 << (3 - (bi & 0x03));
687
t0 = tcg_temp_new_i32();
688
tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
689
tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
690
if (rA(ctx->opcode) == 0)
691
tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
693
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
696
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
698
tcg_temp_free_i32(t0);
701
/*** Integer arithmetic ***/
703
static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
704
TCGv arg1, TCGv arg2, int sub)
709
l1 = gen_new_label();
710
/* Start with XER OV disabled, the most likely case */
711
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
712
t0 = tcg_temp_local_new();
713
tcg_gen_xor_tl(t0, arg0, arg1);
714
#if defined(TARGET_PPC64)
716
tcg_gen_ext32s_tl(t0, t0);
719
tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
721
tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
722
tcg_gen_xor_tl(t0, arg1, arg2);
723
#if defined(TARGET_PPC64)
725
tcg_gen_ext32s_tl(t0, t0);
728
tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
730
tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
731
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
736
static inline void gen_op_arith_compute_ca(DisasContext *ctx, TCGv arg1,
739
int l1 = gen_new_label();
741
#if defined(TARGET_PPC64)
742
if (!(ctx->sf_mode)) {
747
tcg_gen_ext32u_tl(t0, arg1);
748
tcg_gen_ext32u_tl(t1, arg2);
750
tcg_gen_brcond_tl(TCG_COND_GTU, t0, t1, l1);
752
tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
754
tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
762
tcg_gen_brcond_tl(TCG_COND_GTU, arg1, arg2, l1);
764
tcg_gen_brcond_tl(TCG_COND_GEU, arg1, arg2, l1);
766
tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
771
/* Common add function */
772
static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
773
TCGv arg2, int add_ca, int compute_ca,
778
if ((!compute_ca && !compute_ov) ||
779
(!TCGV_EQUAL(ret,arg1) && !TCGV_EQUAL(ret, arg2))) {
782
t0 = tcg_temp_local_new();
786
t1 = tcg_temp_local_new();
787
tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
788
tcg_gen_shri_tl(t1, t1, XER_CA);
793
if (compute_ca && compute_ov) {
794
/* Start with XER CA and OV disabled, the most likely case */
795
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
796
} else if (compute_ca) {
797
/* Start with XER CA disabled, the most likely case */
798
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
799
} else if (compute_ov) {
800
/* Start with XER OV disabled, the most likely case */
801
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
804
tcg_gen_add_tl(t0, arg1, arg2);
807
gen_op_arith_compute_ca(ctx, t0, arg1, 0);
810
tcg_gen_add_tl(t0, t0, t1);
811
gen_op_arith_compute_ca(ctx, t0, t1, 0);
815
gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
818
if (unlikely(Rc(ctx->opcode) != 0))
819
gen_set_Rc0(ctx, t0);
821
if (!TCGV_EQUAL(t0, ret)) {
822
tcg_gen_mov_tl(ret, t0);
826
/* Add functions with two operands */
827
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
828
static void glue(gen_, name)(DisasContext *ctx) \
830
gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
831
cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
832
add_ca, compute_ca, compute_ov); \
834
/* Add functions with one operand and one immediate */
835
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
836
add_ca, compute_ca, compute_ov) \
837
static void glue(gen_, name)(DisasContext *ctx) \
839
TCGv t0 = tcg_const_local_tl(const_val); \
840
gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
841
cpu_gpr[rA(ctx->opcode)], t0, \
842
add_ca, compute_ca, compute_ov); \
846
/* add add. addo addo. */
847
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
848
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
849
/* addc addc. addco addco. */
850
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
851
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
852
/* adde adde. addeo addeo. */
853
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
854
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
855
/* addme addme. addmeo addmeo. */
856
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
857
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
858
/* addze addze. addzeo addzeo.*/
859
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
860
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
862
/* addi */
static void gen_addi(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);

    if (rA(ctx->opcode) == 0) {
        /* li case */
        tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
    } else {
        tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm);
    }
}
874
/*
 * Common body for addic/addic.: add the SIMM field to arg1 and compute
 * XER[CA]; compute_Rc0 selects whether CR0 is also updated.
 */
static inline void gen_op_addic(DisasContext *ctx, TCGv ret, TCGv arg1,
                                int compute_Rc0)
{
    target_long simm = SIMM(ctx->opcode);

    /* Start with XER CA and OV disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));

    if (likely(simm != 0)) {
        TCGv t0 = tcg_temp_local_new();
        tcg_gen_addi_tl(t0, arg1, simm);
        gen_op_arith_compute_ca(ctx, t0, arg1, 0);
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    } else {
        /* Adding zero can never carry */
        tcg_gen_mov_tl(ret, arg1);
    }
    if (compute_Rc0) {
        gen_set_Rc0(ctx, ret);
    }
}
896
/* addic */
static void gen_addic(DisasContext *ctx)
{
    gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
}
901
/* addic. */
static void gen_addic_(DisasContext *ctx)
{
    gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
}
907
/* addis */
static void gen_addis(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);

    if (rA(ctx->opcode) == 0) {
        /* lis case */
        tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
    } else {
        tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm << 16);
    }
}
919
static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
920
TCGv arg2, int sign, int compute_ov)
922
int l1 = gen_new_label();
923
int l2 = gen_new_label();
924
TCGv_i32 t0 = tcg_temp_local_new_i32();
925
TCGv_i32 t1 = tcg_temp_local_new_i32();
927
tcg_gen_trunc_tl_i32(t0, arg1);
928
tcg_gen_trunc_tl_i32(t1, arg2);
929
tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1);
931
int l3 = gen_new_label();
932
tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3);
933
tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1);
935
tcg_gen_div_i32(t0, t0, t1);
937
tcg_gen_divu_i32(t0, t0, t1);
940
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
945
tcg_gen_sari_i32(t0, t0, 31);
947
tcg_gen_movi_i32(t0, 0);
950
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
953
tcg_gen_extu_i32_tl(ret, t0);
954
tcg_temp_free_i32(t0);
955
tcg_temp_free_i32(t1);
956
if (unlikely(Rc(ctx->opcode) != 0))
957
gen_set_Rc0(ctx, ret);
960
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
961
static void glue(gen_, name)(DisasContext *ctx) \
963
gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
964
cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
967
/* divwu divwu. divwuo divwuo. */
968
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
969
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
970
/* divw divw. divwo divwo. */
971
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
972
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
973
#if defined(TARGET_PPC64)
974
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
975
TCGv arg2, int sign, int compute_ov)
977
int l1 = gen_new_label();
978
int l2 = gen_new_label();
980
tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1);
982
int l3 = gen_new_label();
983
tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3);
984
tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1);
986
tcg_gen_div_i64(ret, arg1, arg2);
988
tcg_gen_divu_i64(ret, arg1, arg2);
991
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
996
tcg_gen_sari_i64(ret, arg1, 63);
998
tcg_gen_movi_i64(ret, 0);
1001
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1004
if (unlikely(Rc(ctx->opcode) != 0))
1005
gen_set_Rc0(ctx, ret);
1007
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
1008
static void glue(gen_, name)(DisasContext *ctx) \
1010
gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
1011
cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1012
sign, compute_ov); \
1014
/* divwu divwu. divwuo divwuo. */
1015
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1016
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1017
/* divw divw. divwo divwo. */
1018
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1019
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1023
static void gen_mulhw(DisasContext *ctx)
1027
t0 = tcg_temp_new_i64();
1028
t1 = tcg_temp_new_i64();
1029
#if defined(TARGET_PPC64)
1030
tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
1031
tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
1032
tcg_gen_mul_i64(t0, t0, t1);
1033
tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
1035
tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1036
tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1037
tcg_gen_mul_i64(t0, t0, t1);
1038
tcg_gen_shri_i64(t0, t0, 32);
1039
tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1041
tcg_temp_free_i64(t0);
1042
tcg_temp_free_i64(t1);
1043
if (unlikely(Rc(ctx->opcode) != 0))
1044
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1047
/* mulhwu mulhwu. */
1048
static void gen_mulhwu(DisasContext *ctx)
1052
t0 = tcg_temp_new_i64();
1053
t1 = tcg_temp_new_i64();
1054
#if defined(TARGET_PPC64)
1055
tcg_gen_ext32u_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1056
tcg_gen_ext32u_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1057
tcg_gen_mul_i64(t0, t0, t1);
1058
tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
1060
tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1061
tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1062
tcg_gen_mul_i64(t0, t0, t1);
1063
tcg_gen_shri_i64(t0, t0, 32);
1064
tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1066
tcg_temp_free_i64(t0);
1067
tcg_temp_free_i64(t1);
1068
if (unlikely(Rc(ctx->opcode) != 0))
1069
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1073
static void gen_mullw(DisasContext *ctx)
1075
tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1076
cpu_gpr[rB(ctx->opcode)]);
1077
tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]);
1078
if (unlikely(Rc(ctx->opcode) != 0))
1079
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1082
/* mullwo mullwo. */
1083
static void gen_mullwo(DisasContext *ctx)
1088
t0 = tcg_temp_new_i64();
1089
t1 = tcg_temp_new_i64();
1090
l1 = gen_new_label();
1091
/* Start with XER OV disabled, the most likely case */
1092
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1093
#if defined(TARGET_PPC64)
1094
tcg_gen_ext32s_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1095
tcg_gen_ext32s_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1097
tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1098
tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1100
tcg_gen_mul_i64(t0, t0, t1);
1101
#if defined(TARGET_PPC64)
1102
tcg_gen_ext32s_i64(cpu_gpr[rD(ctx->opcode)], t0);
1103
tcg_gen_brcond_i64(TCG_COND_EQ, t0, cpu_gpr[rD(ctx->opcode)], l1);
1105
tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1106
tcg_gen_ext32s_i64(t1, t0);
1107
tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
1109
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1111
tcg_temp_free_i64(t0);
1112
tcg_temp_free_i64(t1);
1113
if (unlikely(Rc(ctx->opcode) != 0))
1114
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1118
/* mulli */
static void gen_mulli(DisasContext *ctx)
{
    tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    SIMM(ctx->opcode));
}
1123
#if defined(TARGET_PPC64)
1124
#define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
1125
static void glue(gen_, name)(DisasContext *ctx) \
1127
gen_helper_##name (cpu_gpr[rD(ctx->opcode)], \
1128
cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
1129
if (unlikely(Rc(ctx->opcode) != 0)) \
1130
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
1133
GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00);
1134
/* mulhdu mulhdu. */
1135
GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02);
1138
static void gen_mulld(DisasContext *ctx)
1140
tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1141
cpu_gpr[rB(ctx->opcode)]);
1142
if (unlikely(Rc(ctx->opcode) != 0))
1143
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1145
/* mulldo mulldo. */
1146
GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17);
1149
/* neg neg. nego nego. */
1150
static inline void gen_op_arith_neg(DisasContext *ctx, TCGv ret, TCGv arg1,
1153
int l1 = gen_new_label();
1154
int l2 = gen_new_label();
1155
TCGv t0 = tcg_temp_local_new();
1156
#if defined(TARGET_PPC64)
1158
tcg_gen_mov_tl(t0, arg1);
1159
tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT64_MIN, l1);
1163
tcg_gen_ext32s_tl(t0, arg1);
1164
tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT32_MIN, l1);
1166
tcg_gen_neg_tl(ret, arg1);
1168
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1172
tcg_gen_mov_tl(ret, t0);
1174
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1178
if (unlikely(Rc(ctx->opcode) != 0))
1179
gen_set_Rc0(ctx, ret);
1182
static void gen_neg(DisasContext *ctx)
1184
gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
1187
static void gen_nego(DisasContext *ctx)
1189
gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
1192
/* Common subf function */
1193
static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1194
TCGv arg2, int add_ca, int compute_ca,
1199
if ((!compute_ca && !compute_ov) ||
1200
(!TCGV_EQUAL(ret, arg1) && !TCGV_EQUAL(ret, arg2))) {
1203
t0 = tcg_temp_local_new();
1207
t1 = tcg_temp_local_new();
1208
tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
1209
tcg_gen_shri_tl(t1, t1, XER_CA);
1214
if (compute_ca && compute_ov) {
1215
/* Start with XER CA and OV disabled, the most likely case */
1216
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
1217
} else if (compute_ca) {
1218
/* Start with XER CA disabled, the most likely case */
1219
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1220
} else if (compute_ov) {
1221
/* Start with XER OV disabled, the most likely case */
1222
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1226
tcg_gen_not_tl(t0, arg1);
1227
tcg_gen_add_tl(t0, t0, arg2);
1228
gen_op_arith_compute_ca(ctx, t0, arg2, 0);
1229
tcg_gen_add_tl(t0, t0, t1);
1230
gen_op_arith_compute_ca(ctx, t0, t1, 0);
1233
tcg_gen_sub_tl(t0, arg2, arg1);
1235
gen_op_arith_compute_ca(ctx, t0, arg2, 1);
1239
gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1242
if (unlikely(Rc(ctx->opcode) != 0))
1243
gen_set_Rc0(ctx, t0);
1245
if (!TCGV_EQUAL(t0, ret)) {
1246
tcg_gen_mov_tl(ret, t0);
1250
/* Sub functions with Two operands functions */
1251
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
1252
static void glue(gen_, name)(DisasContext *ctx) \
1254
gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1255
cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1256
add_ca, compute_ca, compute_ov); \
1258
/* Sub functions with one operand and one immediate */
1259
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
1260
add_ca, compute_ca, compute_ov) \
1261
static void glue(gen_, name)(DisasContext *ctx) \
1263
TCGv t0 = tcg_const_local_tl(const_val); \
1264
gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1265
cpu_gpr[rA(ctx->opcode)], t0, \
1266
add_ca, compute_ca, compute_ov); \
1267
tcg_temp_free(t0); \
1269
/* subf subf. subfo subfo. */
1270
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
1271
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
1272
/* subfc subfc. subfco subfco. */
1273
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
1274
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
1275
/* subfe subfe. subfeo subfo. */
1276
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
1277
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
1278
/* subfme subfme. subfmeo subfmeo. */
1279
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
1280
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
1281
/* subfze subfze. subfzeo subfzeo.*/
1282
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
1283
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
1286
/* subfic */
static void gen_subfic(DisasContext *ctx)
{
    /* Start with XER CA and OV disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_const_local_tl(SIMM(ctx->opcode));
    tcg_gen_sub_tl(t0, t1, cpu_gpr[rA(ctx->opcode)]);
    gen_op_arith_compute_ca(ctx, t0, t1, 1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
1299
/*** Integer logical ***/
1300
#define GEN_LOGICAL2(name, tcg_op, opc, type) \
1301
static void glue(gen_, name)(DisasContext *ctx) \
1303
tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
1304
cpu_gpr[rB(ctx->opcode)]); \
1305
if (unlikely(Rc(ctx->opcode) != 0)) \
1306
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1309
#define GEN_LOGICAL1(name, tcg_op, opc, type) \
1310
static void glue(gen_, name)(DisasContext *ctx) \
1312
tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
1313
if (unlikely(Rc(ctx->opcode) != 0)) \
1314
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1318
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
1320
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
1323
/* andi. — always sets CR0 (Rc is implicit in this form) */
static void gen_andi_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
1330
static void gen_andis_(DisasContext *ctx)
1332
tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
1333
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1337
static void gen_cntlzw(DisasContext *ctx)
1339
gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1340
if (unlikely(Rc(ctx->opcode) != 0))
1341
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1344
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
1345
/* extsb & extsb. */
1346
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
1347
/* extsh & extsh. */
1348
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
1350
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
1352
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
1355
static void gen_or(DisasContext *ctx)
1359
rs = rS(ctx->opcode);
1360
ra = rA(ctx->opcode);
1361
rb = rB(ctx->opcode);
1362
/* Optimisation for mr. ri case */
1363
if (rs != ra || rs != rb) {
1365
tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
1367
tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
1368
if (unlikely(Rc(ctx->opcode) != 0))
1369
gen_set_Rc0(ctx, cpu_gpr[ra]);
1370
} else if (unlikely(Rc(ctx->opcode) != 0)) {
1371
gen_set_Rc0(ctx, cpu_gpr[rs]);
1372
#if defined(TARGET_PPC64)
1378
/* Set process priority to low */
1382
/* Set process priority to medium-low */
1386
/* Set process priority to normal */
1389
#if !defined(CONFIG_USER_ONLY)
1391
if (ctx->mem_idx > 0) {
1392
/* Set process priority to very low */
1397
if (ctx->mem_idx > 0) {
1398
/* Set process priority to medium-hight */
1403
if (ctx->mem_idx > 0) {
1404
/* Set process priority to high */
1409
if (ctx->mem_idx > 1) {
1410
/* Set process priority to very high */
1420
TCGv t0 = tcg_temp_new();
1421
gen_load_spr(t0, SPR_PPR);
1422
tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
1423
tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
1424
gen_store_spr(SPR_PPR, t0);
1431
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
1434
/* xor & xor. */
static void gen_xor(DisasContext *ctx)
{
    /* Optimisation for "set to zero" case */
    if (rS(ctx->opcode) != rB(ctx->opcode))
        tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                       cpu_gpr[rB(ctx->opcode)]);
    else
        tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* ori — "ori r0,r0,0" style encodings are NOPs */
static void gen_ori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        /* XXX: should handle special NOPs for POWER series */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* oris */
static void gen_oris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                   uimm << 16);
}

/* xori */
static void gen_xori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* xoris */
static void gen_xoris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    uimm << 16);
}
/* popcntb : PowerPC 2.03 specification */
1495
static void gen_popcntb(DisasContext *ctx)
1497
gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1500
static void gen_popcntw(DisasContext *ctx)
1502
gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1505
#if defined(TARGET_PPC64)
1506
/* popcntd: PowerPC 2.06 specification */
1507
static void gen_popcntd(DisasContext *ctx)
1509
gen_helper_popcntd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1513
#if defined(TARGET_PPC64)
1514
/* extsw & extsw. */
1515
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
1518
static void gen_cntlzd(DisasContext *ctx)
1520
gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1521
if (unlikely(Rc(ctx->opcode) != 0))
1522
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1526
/*** Integer rotate ***/
1528
/* rlwimi & rlwimi. */
1529
static void gen_rlwimi(DisasContext *ctx)
1531
uint32_t mb, me, sh;
1533
mb = MB(ctx->opcode);
1534
me = ME(ctx->opcode);
1535
sh = SH(ctx->opcode);
1536
if (likely(sh == 0 && mb == 0 && me == 31)) {
1537
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1541
TCGv t0 = tcg_temp_new();
1542
#if defined(TARGET_PPC64)
1543
TCGv_i32 t2 = tcg_temp_new_i32();
1544
tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
1545
tcg_gen_rotli_i32(t2, t2, sh);
1546
tcg_gen_extu_i32_i64(t0, t2);
1547
tcg_temp_free_i32(t2);
1549
tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1551
#if defined(TARGET_PPC64)
1555
mask = MASK(mb, me);
1556
t1 = tcg_temp_new();
1557
tcg_gen_andi_tl(t0, t0, mask);
1558
tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1559
tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1563
if (unlikely(Rc(ctx->opcode) != 0))
1564
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1567
/* rlwinm & rlwinm. */
1568
static void gen_rlwinm(DisasContext *ctx)
1570
uint32_t mb, me, sh;
1572
sh = SH(ctx->opcode);
1573
mb = MB(ctx->opcode);
1574
me = ME(ctx->opcode);
1576
if (likely(mb == 0 && me == (31 - sh))) {
1577
if (likely(sh == 0)) {
1578
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1580
TCGv t0 = tcg_temp_new();
1581
tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1582
tcg_gen_shli_tl(t0, t0, sh);
1583
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1586
} else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
1587
TCGv t0 = tcg_temp_new();
1588
tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1589
tcg_gen_shri_tl(t0, t0, mb);
1590
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1593
TCGv t0 = tcg_temp_new();
1594
#if defined(TARGET_PPC64)
1595
TCGv_i32 t1 = tcg_temp_new_i32();
1596
tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1597
tcg_gen_rotli_i32(t1, t1, sh);
1598
tcg_gen_extu_i32_i64(t0, t1);
1599
tcg_temp_free_i32(t1);
1601
tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1603
#if defined(TARGET_PPC64)
1607
tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1610
if (unlikely(Rc(ctx->opcode) != 0))
1611
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1614
/* rlwnm & rlwnm. */
1615
static void gen_rlwnm(DisasContext *ctx)
1619
#if defined(TARGET_PPC64)
1623
mb = MB(ctx->opcode);
1624
me = ME(ctx->opcode);
1625
t0 = tcg_temp_new();
1626
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
1627
#if defined(TARGET_PPC64)
1628
t1 = tcg_temp_new_i32();
1629
t2 = tcg_temp_new_i32();
1630
tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1631
tcg_gen_trunc_i64_i32(t2, t0);
1632
tcg_gen_rotl_i32(t1, t1, t2);
1633
tcg_gen_extu_i32_i64(t0, t1);
1634
tcg_temp_free_i32(t1);
1635
tcg_temp_free_i32(t2);
1637
tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
1639
if (unlikely(mb != 0 || me != 31)) {
1640
#if defined(TARGET_PPC64)
1644
tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1646
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1649
if (unlikely(Rc(ctx->opcode) != 0))
1650
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1653
#if defined(TARGET_PPC64)
/* Expand the 2 sub-opcode variants (sh/mb extension bit) of a 64-bit rotate */
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0);                                                       \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1);                                                       \
}
/* Expand the 4 sub-opcode variants (two extension bits) */
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 1);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##2)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##3)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 1);                                                    \
}
static inline void gen_rldinm(DisasContext *ctx, uint32_t mb, uint32_t me,
1688
if (likely(sh != 0 && mb == 0 && me == (63 - sh))) {
1689
tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
1690
} else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
1691
tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
1693
TCGv t0 = tcg_temp_new();
1694
tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1695
if (likely(mb == 0 && me == 63)) {
1696
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1698
tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1702
if (unlikely(Rc(ctx->opcode) != 0))
1703
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1705
/* rldicl - rldicl. */
1706
static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
1710
sh = SH(ctx->opcode) | (shn << 5);
1711
mb = MB(ctx->opcode) | (mbn << 5);
1712
gen_rldinm(ctx, mb, 63, sh);
1714
GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1715
/* rldicr - rldicr. */
1716
static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
1720
sh = SH(ctx->opcode) | (shn << 5);
1721
me = MB(ctx->opcode) | (men << 5);
1722
gen_rldinm(ctx, 0, me, sh);
1724
GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1725
/* rldic - rldic. */
1726
static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
1730
sh = SH(ctx->opcode) | (shn << 5);
1731
mb = MB(ctx->opcode) | (mbn << 5);
1732
gen_rldinm(ctx, mb, 63 - sh, sh);
1734
GEN_PPC64_R4(rldic, 0x1E, 0x04);
1736
static inline void gen_rldnm(DisasContext *ctx, uint32_t mb, uint32_t me)
1740
mb = MB(ctx->opcode);
1741
me = ME(ctx->opcode);
1742
t0 = tcg_temp_new();
1743
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1744
tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1745
if (unlikely(mb != 0 || me != 63)) {
1746
tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1748
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1751
if (unlikely(Rc(ctx->opcode) != 0))
1752
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1755
/* rldcl - rldcl. */
1756
static inline void gen_rldcl(DisasContext *ctx, int mbn)
1760
mb = MB(ctx->opcode) | (mbn << 5);
1761
gen_rldnm(ctx, mb, 63);
1763
GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1764
/* rldcr - rldcr. */
1765
static inline void gen_rldcr(DisasContext *ctx, int men)
1769
me = MB(ctx->opcode) | (men << 5);
1770
gen_rldnm(ctx, 0, me);
1772
GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1773
/* rldimi - rldimi. */
1774
static inline void gen_rldimi(DisasContext *ctx, int mbn, int shn)
1776
uint32_t sh, mb, me;
1778
sh = SH(ctx->opcode) | (shn << 5);
1779
mb = MB(ctx->opcode) | (mbn << 5);
1781
if (unlikely(sh == 0 && mb == 0)) {
1782
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1787
t0 = tcg_temp_new();
1788
tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1789
t1 = tcg_temp_new();
1790
mask = MASK(mb, me);
1791
tcg_gen_andi_tl(t0, t0, mask);
1792
tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1793
tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1797
if (unlikely(Rc(ctx->opcode) != 0))
1798
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1800
GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1803
/*** Integer shift ***/
1806
static void gen_slw(DisasContext *ctx)
1810
t0 = tcg_temp_new();
1811
/* AND rS with a mask that is 0 when rB >= 0x20 */
1812
#if defined(TARGET_PPC64)
1813
tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1814
tcg_gen_sari_tl(t0, t0, 0x3f);
1816
tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1817
tcg_gen_sari_tl(t0, t0, 0x1f);
1819
tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1820
t1 = tcg_temp_new();
1821
tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1822
tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1825
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
1826
if (unlikely(Rc(ctx->opcode) != 0))
1827
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1831
/* sraw & sraw. — helper also computes XER[CA] */
static void gen_sraw(DisasContext *ctx)
{
    gen_helper_sraw(cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srawi & srawi. — CA is set iff the source is negative and a 1 bit was
 * shifted out */
static void gen_srawi(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    if (sh != 0) {
        int l1, l2;
        TCGv t0;
        l1 = gen_new_label();
        l2 = gen_new_label();
        t0 = tcg_temp_local_new();
        tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
        tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
        tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
        tcg_gen_br(l2);
        gen_set_label(l1);
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
        gen_set_label(l2);
        tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], t0, sh);
        tcg_temp_free(t0);
    } else {
        /* Zero shift: plain move, CA cleared */
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srw & srw. — result is 0 when the shift amount (rB) has bit 0x20 set */
static void gen_srw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_ext32u_tl(t0, t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
#if defined(TARGET_PPC64)
/* sld & sld. — result is 0 when the shift amount (rB) has bit 0x40 set */
static void gen_sld(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srad & srad. — helper also computes XER[CA] */
static void gen_srad(DisasContext *ctx)
{
    gen_helper_srad(cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sradi & sradi. */
1923
static inline void gen_sradi(DisasContext *ctx, int n)
1925
int sh = SH(ctx->opcode) + (n << 5);
1929
l1 = gen_new_label();
1930
l2 = gen_new_label();
1931
t0 = tcg_temp_local_new();
1932
tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
1933
tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
1934
tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
1935
tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
1938
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1941
tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
1943
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1944
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1946
if (unlikely(Rc(ctx->opcode) != 0))
1947
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1950
static void gen_sradi0(DisasContext *ctx)
1955
static void gen_sradi1(DisasContext *ctx)
1961
static void gen_srd(DisasContext *ctx)
1965
t0 = tcg_temp_new();
1966
/* AND rS with a mask that is 0 when rB >= 0x40 */
1967
tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
1968
tcg_gen_sari_tl(t0, t0, 0x3f);
1969
tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1970
t1 = tcg_temp_new();
1971
tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
1972
tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1975
if (unlikely(Rc(ctx->opcode) != 0))
1976
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1980
/*** Floating-Point arithmetic ***/
/* 3-operand FP op (frD = frA op frC op frB); isfloat requests a final
 * round-to-single (the "s" mnemonics). */
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type)           \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],      \
                     cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);     \
    if (isfloat) {                                                            \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);  \
    }                                                                         \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], set_fprf,                      \
                     Rc(ctx->opcode) != 0);                                   \
}

#define GEN_FLOAT_ACB(name, op2, set_fprf, type)                              \
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type);                     \
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);

/* 2-operand FP op using frA and frB */
#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type)     \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],      \
                     cpu_fpr[rB(ctx->opcode)]);                               \
    if (isfloat) {                                                            \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);  \
    }                                                                         \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)],                                \
                     set_fprf, Rc(ctx->opcode) != 0);                         \
}
#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type)                        \
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type);               \
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

/* 2-operand FP op using frA and frC */
#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type)     \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],      \
                     cpu_fpr[rC(ctx->opcode)]);                               \
    if (isfloat) {                                                            \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);  \
    }                                                                         \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)],                                \
                     set_fprf, Rc(ctx->opcode) != 0);                         \
}
#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type)                        \
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type);               \
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
/* 1-operand FP op: frD = op(frB) */
#define GEN_FLOAT_B(name, op2, op3, set_fprf, type)                           \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);   \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)],                                \
                     set_fprf, Rc(ctx->opcode) != 0);                         \
}

/* 1-operand FP op, opc1/opc2 addressed variant */
#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type)                          \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);   \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)],                                \
                     set_fprf, Rc(ctx->opcode) != 0);                         \
}
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
2081
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
2083
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
2086
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
2089
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
2092
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
2095
static void gen_frsqrtes(DisasContext *ctx)
2097
if (unlikely(!ctx->fpu_enabled)) {
2098
gen_exception(ctx, POWERPC_EXCP_FPU);
2101
/* NIP cannot be restored if the memory exception comes from an helper */
2102
gen_update_nip(ctx, ctx->nip - 4);
2103
gen_reset_fpstatus();
2104
gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2105
gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2106
gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2110
_GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
2112
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
2116
/* fsqrt */
static void gen_fsqrt(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
}

/* fsqrts — fsqrt followed by round-to-single */
static void gen_fsqrts(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
    gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
}
/*** Floating-Point multiply-and-add ***/
2144
/* fmadd - fmadds */
2145
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
2146
/* fmsub - fmsubs */
2147
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
2148
/* fnmadd - fnmadds */
2149
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
2150
/* fnmsub - fnmsubs */
2151
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
2153
/*** Floating-Point round & convert ***/
2155
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
2157
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
2159
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
2160
#if defined(TARGET_PPC64)
2162
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
2164
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
2166
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
2170
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
2172
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
2174
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
2176
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
2178
/*** Floating-Point compare ***/
2181
static void gen_fcmpo(DisasContext *ctx)
2184
if (unlikely(!ctx->fpu_enabled)) {
2185
gen_exception(ctx, POWERPC_EXCP_FPU);
2188
/* NIP cannot be restored if the memory exception comes from an helper */
2189
gen_update_nip(ctx, ctx->nip - 4);
2190
gen_reset_fpstatus();
2191
crf = tcg_const_i32(crfD(ctx->opcode));
2192
gen_helper_fcmpo(cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)], crf);
2193
tcg_temp_free_i32(crf);
2194
gen_helper_float_check_status();
2198
static void gen_fcmpu(DisasContext *ctx)
2201
if (unlikely(!ctx->fpu_enabled)) {
2202
gen_exception(ctx, POWERPC_EXCP_FPU);
2205
/* NIP cannot be restored if the memory exception comes from an helper */
2206
gen_update_nip(ctx, ctx->nip - 4);
2207
gen_reset_fpstatus();
2208
crf = tcg_const_i32(crfD(ctx->opcode));
2209
gen_helper_fcmpu(cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)], crf);
2210
tcg_temp_free_i32(crf);
2211
gen_helper_float_check_status();
2214
/*** Floating-point move ***/
2216
/* XXX: beware that fabs never checks for NaNs nor update FPSCR */
2217
GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT);
2220
/* XXX: beware that fmr never checks for NaNs nor update FPSCR */
2221
static void gen_fmr(DisasContext *ctx)
2223
if (unlikely(!ctx->fpu_enabled)) {
2224
gen_exception(ctx, POWERPC_EXCP_FPU);
2227
tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2228
gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2232
/* XXX: beware that fnabs never checks for NaNs nor update FPSCR */
2233
GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT);
2235
/* XXX: beware that fneg never checks for NaNs nor update FPSCR */
2236
GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT);
2238
/*** Floating-Point status & ctrl register ***/
2241
static void gen_mcrfs(DisasContext *ctx)
2245
if (unlikely(!ctx->fpu_enabled)) {
2246
gen_exception(ctx, POWERPC_EXCP_FPU);
2249
bfa = 4 * (7 - crfS(ctx->opcode));
2250
tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_fpscr, bfa);
2251
tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
2252
tcg_gen_andi_i32(cpu_fpscr, cpu_fpscr, ~(0xF << bfa));
2256
static void gen_mffs(DisasContext *ctx)
2258
if (unlikely(!ctx->fpu_enabled)) {
2259
gen_exception(ctx, POWERPC_EXCP_FPU);
2262
gen_reset_fpstatus();
2263
tcg_gen_extu_i32_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
2264
gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2268
/* mtfsb0 — clear one FPSCR bit (FEX/VX are derived bits and are skipped) */
static void gen_mtfsb0(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
        TCGv_i32 t0;
        /* NIP cannot be restored if the memory exception comes from an
         * helper */
        gen_update_nip(ctx, ctx->nip - 4);
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_clrbit(t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
    }
}

/* mtfsb1 — set one FPSCR bit */
static void gen_mtfsb1(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    /* XXX: we pretend we can only do IEEE floating-point computations */
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
        TCGv_i32 t0;
        /* NIP cannot be restored if the memory exception comes from an
         * helper */
        gen_update_nip(ctx, ctx->nip - 4);
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_setbit(t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
    }
    /* We can raise a differed exception */
    gen_helper_float_check_status();
}
/* mtfsf — move frB into the FPSCR fields selected by FM (all if L is set) */
static void gen_mtfsf(DisasContext *ctx)
{
    TCGv_i32 t0;
    int L = ctx->opcode & 0x02000000;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    if (L)
        t0 = tcg_const_i32(0xff);
    else
        t0 = tcg_const_i32(FM(ctx->opcode));
    gen_helper_store_fpscr(cpu_fpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
    }
    /* We can raise a differed exception */
    gen_helper_float_check_status();
}
/* mtfsfi — store a 4-bit immediate into FPSCR field bf */
static void gen_mtfsfi(DisasContext *ctx)
{
    int bf, sh;
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    bf = crbD(ctx->opcode) >> 2;
    sh = 7 - bf;
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    t0 = tcg_const_i64(FPIMM(ctx->opcode) << (4 * sh));
    t1 = tcg_const_i32(1 << sh);
    gen_helper_store_fpscr(t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
    }
    /* We can raise a differed exception */
    gen_helper_float_check_status();
}
/*** Addressing modes ***/
2373
/* Register indirect with immediate index : EA = (rA|0) + SIMM */
2374
static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2377
target_long simm = SIMM(ctx->opcode);
2380
if (rA(ctx->opcode) == 0) {
2381
#if defined(TARGET_PPC64)
2382
if (!ctx->sf_mode) {
2383
tcg_gen_movi_tl(EA, (uint32_t)simm);
2386
tcg_gen_movi_tl(EA, simm);
2387
} else if (likely(simm != 0)) {
2388
tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2389
#if defined(TARGET_PPC64)
2390
if (!ctx->sf_mode) {
2391
tcg_gen_ext32u_tl(EA, EA);
2395
#if defined(TARGET_PPC64)
2396
if (!ctx->sf_mode) {
2397
tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2400
tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2404
static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
2406
if (rA(ctx->opcode) == 0) {
2407
#if defined(TARGET_PPC64)
2408
if (!ctx->sf_mode) {
2409
tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2412
tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2414
tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2415
#if defined(TARGET_PPC64)
2416
if (!ctx->sf_mode) {
2417
tcg_gen_ext32u_tl(EA, EA);
2423
static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
2425
if (rA(ctx->opcode) == 0) {
2426
tcg_gen_movi_tl(EA, 0);
2428
#if defined(TARGET_PPC64)
2429
if (!ctx->sf_mode) {
2430
tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2433
tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2437
static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
2440
tcg_gen_addi_tl(ret, arg1, val);
2441
#if defined(TARGET_PPC64)
2442
if (!ctx->sf_mode) {
2443
tcg_gen_ext32u_tl(ret, ret);
2448
/* Raise an alignment exception if EA has any of the bits in mask set */
static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask)
{
    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv_i32 t1, t2;
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    tcg_gen_andi_tl(t0, EA, mask);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
    t2 = tcg_const_i32(0);
    gen_helper_raise_exception_err(t1, t2);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    gen_set_label(l1);
    tcg_temp_free(t0);
}
/*** Integer load ***/
2467
static inline void gen_qemu_ld8u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2469
tcg_gen_qemu_ld8u(arg1, arg2, ctx->mem_idx);
2472
static inline void gen_qemu_ld8s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2474
tcg_gen_qemu_ld8s(arg1, arg2, ctx->mem_idx);
2477
static inline void gen_qemu_ld16u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2479
tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2480
if (unlikely(ctx->le_mode)) {
2481
tcg_gen_bswap16_tl(arg1, arg1);
2485
static inline void gen_qemu_ld16s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2487
if (unlikely(ctx->le_mode)) {
2488
tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2489
tcg_gen_bswap16_tl(arg1, arg1);
2490
tcg_gen_ext16s_tl(arg1, arg1);
2492
tcg_gen_qemu_ld16s(arg1, arg2, ctx->mem_idx);
2496
static inline void gen_qemu_ld32u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2498
tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2499
if (unlikely(ctx->le_mode)) {
2500
tcg_gen_bswap32_tl(arg1, arg1);
2504
#if defined(TARGET_PPC64)
2505
static inline void gen_qemu_ld32s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2507
if (unlikely(ctx->le_mode)) {
2508
tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2509
tcg_gen_bswap32_tl(arg1, arg1);
2510
tcg_gen_ext32s_tl(arg1, arg1);
2512
tcg_gen_qemu_ld32s(arg1, arg2, ctx->mem_idx);
2516
static inline void gen_qemu_ld64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2518
tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
2519
if (unlikely(ctx->le_mode)) {
2520
tcg_gen_bswap64_i64(arg1, arg1);
2524
static inline void gen_qemu_st8(DisasContext *ctx, TCGv arg1, TCGv arg2)
2526
tcg_gen_qemu_st8(arg1, arg2, ctx->mem_idx);
2529
static inline void gen_qemu_st16(DisasContext *ctx, TCGv arg1, TCGv arg2)
2531
if (unlikely(ctx->le_mode)) {
2532
TCGv t0 = tcg_temp_new();
2533
tcg_gen_ext16u_tl(t0, arg1);
2534
tcg_gen_bswap16_tl(t0, t0);
2535
tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2538
tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2542
static inline void gen_qemu_st32(DisasContext *ctx, TCGv arg1, TCGv arg2)
2544
if (unlikely(ctx->le_mode)) {
2545
TCGv t0 = tcg_temp_new();
2546
tcg_gen_ext32u_tl(t0, arg1);
2547
tcg_gen_bswap32_tl(t0, t0);
2548
tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2551
tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2555
static inline void gen_qemu_st64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2557
if (unlikely(ctx->le_mode)) {
2558
TCGv_i64 t0 = tcg_temp_new_i64();
2559
tcg_gen_bswap64_i64(t0, arg1);
2560
tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
2561
tcg_temp_free_i64(t0);
2563
tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx);
2566
/* Integer load with D-form immediate addressing (lbz, lhz, ...). */
#define GEN_LD(name, ldop, opc, type)                                         \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv EA;                                                                  \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_imm_index(ctx, EA, 0);                                           \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

/* Load with update: rA := EA afterwards; rA == 0 or rA == rD is invalid.
 * PPC_64B forms (DS-form) mask the low immediate bits. */
#define GEN_LDU(name, ldop, opc, type)                                        \
static void glue(gen_, name##u)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(rA(ctx->opcode) == 0 ||                                      \
                 rA(ctx->opcode) == rD(ctx->opcode))) {                       \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    if (type == PPC_64B)                                                      \
        gen_addr_imm_index(ctx, EA, 0x03);                                    \
    else                                                                      \
        gen_addr_imm_index(ctx, EA, 0);                                       \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

/* Indexed load with update (X-form, EA = rA + rB). */
#define GEN_LDUX(name, ldop, opc2, opc3, type)                                \
static void glue(gen_, name##ux)(DisasContext *ctx)                           \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(rA(ctx->opcode) == 0 ||                                      \
                 rA(ctx->opcode) == rD(ctx->opcode))) {                       \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

/* Indexed load without update (X-form). */
#define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
static void glue(gen_, name##x)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

/* Expand the four variants (base, update, update-indexed, indexed). */
#define GEN_LDS(name, ldop, op, type)                                         \
GEN_LD(name, ldop, op | 0x20, type);                                          \
GEN_LDU(name, ldop, op | 0x21, type);                                         \
GEN_LDUX(name, ldop, 0x17, op | 0x01, type);                                  \
GEN_LDX(name, ldop, 0x17, op | 0x00, type)
/* lbz lbzu lbzux lbzx */
2632
GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
2633
/* lha lhau lhaux lhax */
2634
GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
2635
/* lhz lhzu lhzux lhzx */
2636
GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
2637
/* lwz lwzu lwzux lwzx */
2638
GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
2639
#if defined(TARGET_PPC64)
2641
GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
2643
GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
2645
GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B);
2647
GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B);
2649
static void gen_ld(DisasContext *ctx)
2652
if (Rc(ctx->opcode)) {
2653
if (unlikely(rA(ctx->opcode) == 0 ||
2654
rA(ctx->opcode) == rD(ctx->opcode))) {
2655
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2659
gen_set_access_type(ctx, ACCESS_INT);
2660
EA = tcg_temp_new();
2661
gen_addr_imm_index(ctx, EA, 0x03);
2662
if (ctx->opcode & 0x02) {
2663
/* lwa (lwau is undefined) */
2664
gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2667
gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2669
if (Rc(ctx->opcode))
2670
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2675
static void gen_lq(DisasContext *ctx)
2677
#if defined(CONFIG_USER_ONLY)
2678
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2683
/* Restore CPU state */
2684
if (unlikely(ctx->mem_idx == 0)) {
2685
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2688
ra = rA(ctx->opcode);
2689
rd = rD(ctx->opcode);
2690
if (unlikely((rd & 1) || rd == ra)) {
2691
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2694
if (unlikely(ctx->le_mode)) {
2695
/* Little-endian mode is not handled */
2696
gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2699
gen_set_access_type(ctx, ACCESS_INT);
2700
EA = tcg_temp_new();
2701
gen_addr_imm_index(ctx, EA, 0x0F);
2702
gen_qemu_ld64(ctx, cpu_gpr[rd], EA);
2703
gen_addr_add(ctx, EA, EA, 8);
2704
gen_qemu_ld64(ctx, cpu_gpr[rd+1], EA);
2710
/*** Integer store ***/
/* Integer store with D-form immediate addressing. */
#define GEN_ST(name, stop, opc, type)                                         \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv EA;                                                                  \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_imm_index(ctx, EA, 0);                                           \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

/* Store with update: rA := EA; rA == 0 is invalid.  Note the generated
 * function is named after the store op (gen_st8u etc.), as upstream. */
#define GEN_STU(name, stop, opc, type)                                        \
static void glue(gen_, stop##u)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    if (type == PPC_64B)                                                      \
        gen_addr_imm_index(ctx, EA, 0x03);                                    \
    else                                                                      \
        gen_addr_imm_index(ctx, EA, 0);                                       \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

/* Indexed store with update (X-form). */
#define GEN_STUX(name, stop, opc2, opc3, type)                                \
static void glue(gen_, name##ux)(DisasContext *ctx)                           \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

/* Indexed store without update (X-form). */
#define GEN_STX(name, stop, opc2, opc3, type)                                 \
static void glue(gen_, name##x)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

/* Expand the four store variants. */
#define GEN_STS(name, stop, op, type)                                         \
GEN_ST(name, stop, op | 0x20, type);                                          \
GEN_STU(name, stop, op | 0x21, type);                                         \
GEN_STUX(name, stop, 0x17, op | 0x01, type);                                  \
GEN_STX(name, stop, 0x17, op | 0x00, type)
/* stb stbu stbux stbx */
2775
GEN_STS(stb, st8, 0x06, PPC_INTEGER);
2776
/* sth sthu sthux sthx */
2777
GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
2778
/* stw stwu stwux stwx */
2779
GEN_STS(stw, st32, 0x04, PPC_INTEGER);
2780
#if defined(TARGET_PPC64)
2781
GEN_STUX(std, st64, 0x15, 0x05, PPC_64B);
2782
GEN_STX(std, st64, 0x15, 0x04, PPC_64B);
2784
static void gen_std(DisasContext *ctx)
2789
rs = rS(ctx->opcode);
2790
if ((ctx->opcode & 0x3) == 0x2) {
2791
#if defined(CONFIG_USER_ONLY)
2792
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2795
if (unlikely(ctx->mem_idx == 0)) {
2796
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2799
if (unlikely(rs & 1)) {
2800
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2803
if (unlikely(ctx->le_mode)) {
2804
/* Little-endian mode is not handled */
2805
gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2808
gen_set_access_type(ctx, ACCESS_INT);
2809
EA = tcg_temp_new();
2810
gen_addr_imm_index(ctx, EA, 0x03);
2811
gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2812
gen_addr_add(ctx, EA, EA, 8);
2813
gen_qemu_st64(ctx, cpu_gpr[rs+1], EA);
2818
if (Rc(ctx->opcode)) {
2819
if (unlikely(rA(ctx->opcode) == 0)) {
2820
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2824
gen_set_access_type(ctx, ACCESS_INT);
2825
EA = tcg_temp_new();
2826
gen_addr_imm_index(ctx, EA, 0x03);
2827
gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2828
if (Rc(ctx->opcode))
2829
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2834
/*** Integer load and store with byte reverse ***/
2836
static inline void gen_qemu_ld16ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2838
tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2839
if (likely(!ctx->le_mode)) {
2840
tcg_gen_bswap16_tl(arg1, arg1);
2843
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
2846
static inline void gen_qemu_ld32ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2848
tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2849
if (likely(!ctx->le_mode)) {
2850
tcg_gen_bswap32_tl(arg1, arg1);
2853
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
2856
static inline void gen_qemu_st16r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2858
if (likely(!ctx->le_mode)) {
2859
TCGv t0 = tcg_temp_new();
2860
tcg_gen_ext16u_tl(t0, arg1);
2861
tcg_gen_bswap16_tl(t0, t0);
2862
tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2865
tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2868
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
2871
static inline void gen_qemu_st32r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2873
if (likely(!ctx->le_mode)) {
2874
TCGv t0 = tcg_temp_new();
2875
tcg_gen_ext32u_tl(t0, arg1);
2876
tcg_gen_bswap32_tl(t0, t0);
2877
tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2880
tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2883
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
2885
/*** Integer load and store multiple ***/
2888
static void gen_lmw(DisasContext *ctx)
2892
gen_set_access_type(ctx, ACCESS_INT);
2893
/* NIP cannot be restored if the memory exception comes from an helper */
2894
gen_update_nip(ctx, ctx->nip - 4);
2895
t0 = tcg_temp_new();
2896
t1 = tcg_const_i32(rD(ctx->opcode));
2897
gen_addr_imm_index(ctx, t0, 0);
2898
gen_helper_lmw(t0, t1);
2900
tcg_temp_free_i32(t1);
2904
static void gen_stmw(DisasContext *ctx)
2908
gen_set_access_type(ctx, ACCESS_INT);
2909
/* NIP cannot be restored if the memory exception comes from an helper */
2910
gen_update_nip(ctx, ctx->nip - 4);
2911
t0 = tcg_temp_new();
2912
t1 = tcg_const_i32(rS(ctx->opcode));
2913
gen_addr_imm_index(ctx, t0, 0);
2914
gen_helper_stmw(t0, t1);
2916
tcg_temp_free_i32(t1);
2919
/*** Integer load and store strings ***/
2922
/* PowerPC32 specification says we must generate an exception if
2923
* rA is in the range of registers to be loaded.
2924
* In an other hand, IBM says this is valid, but rA won't be loaded.
2925
* For now, I'll follow the spec...
2927
static void gen_lswi(DisasContext *ctx)
2931
int nb = NB(ctx->opcode);
2932
int start = rD(ctx->opcode);
2933
int ra = rA(ctx->opcode);
2939
if (unlikely(((start + nr) > 32 &&
2940
start <= ra && (start + nr - 32) > ra) ||
2941
((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
2942
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
2945
gen_set_access_type(ctx, ACCESS_INT);
2946
/* NIP cannot be restored if the memory exception comes from an helper */
2947
gen_update_nip(ctx, ctx->nip - 4);
2948
t0 = tcg_temp_new();
2949
gen_addr_register(ctx, t0);
2950
t1 = tcg_const_i32(nb);
2951
t2 = tcg_const_i32(start);
2952
gen_helper_lsw(t0, t1, t2);
2954
tcg_temp_free_i32(t1);
2955
tcg_temp_free_i32(t2);
2959
/* lswx: load string word indexed; length comes from XER at run time,
 * so all checks are done in the helper. */
static void gen_lswx(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2, t3;
    gen_set_access_type(ctx, ACCESS_INT);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    t1 = tcg_const_i32(rD(ctx->opcode));
    t2 = tcg_const_i32(rA(ctx->opcode));
    t3 = tcg_const_i32(rB(ctx->opcode));
    gen_helper_lswx(t0, t1, t2, t3);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);
}
/* stswi: store string word immediate; NB == 0 means 32 bytes. */
static void gen_stswi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    int nb = NB(ctx->opcode);
    gen_set_access_type(ctx, ACCESS_INT);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    gen_addr_register(ctx, t0);
    if (nb == 0)
        nb = 32;
    t1 = tcg_const_i32(nb);
    t2 = tcg_const_i32(rS(ctx->opcode));
    gen_helper_stsw(t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}
/* stswx: store string word indexed; byte count is XER[25:31]. */
static void gen_stswx(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    gen_set_access_type(ctx, ACCESS_INT);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    t1 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(t1, cpu_xer);
    tcg_gen_andi_i32(t1, t1, 0x7F);
    t2 = tcg_const_i32(rS(ctx->opcode));
    gen_helper_stsw(t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}
/*** Memory synchronisation ***/
3021
static void gen_eieio(DisasContext *ctx)
3026
static void gen_isync(DisasContext *ctx)
3028
gen_stop_exception(ctx);
3032
/* lwarx: load word and set the reservation (address in cpu_reserve,
 * loaded value saved for the later stwcx. comparison). */
static void gen_lwarx(DisasContext *ctx)
{
    TCGv t0;
    TCGv gpr = cpu_gpr[rD(ctx->opcode)];
    gen_set_access_type(ctx, ACCESS_RES);
    t0 = tcg_temp_local_new();
    gen_addr_reg_index(ctx, t0);
    gen_check_align(ctx, t0, 0x03);
    gen_qemu_ld32u(ctx, gpr, t0);
    tcg_gen_mov_tl(cpu_reserve, t0);
    tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUState, reserve_val));
    tcg_temp_free(t0);
}
#if defined(CONFIG_USER_ONLY)
/* User-mode stcx.: record the reservation details in env and raise
 * POWERPC_EXCP_STCX so the store-conditional is resolved outside the TB.
 * The exception state is saved/restored so the branch state survives. */
static void gen_conditional_store (DisasContext *ctx, TCGv EA,
                                   int reg, int size)
{
    TCGv t0 = tcg_temp_new();
    uint32_t save_exception = ctx->exception;

    tcg_gen_st_tl(EA, cpu_env, offsetof(CPUState, reserve_ea));
    tcg_gen_movi_tl(t0, (size << 5) | reg);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, reserve_info));
    tcg_temp_free(t0);
    gen_update_nip(ctx, ctx->nip-4);
    ctx->exception = POWERPC_EXCP_BRANCH;
    gen_exception(ctx, POWERPC_EXCP_STCX);
    ctx->exception = save_exception;
}
#endif
/* stwcx.: store word conditional; CR0 gets SO plus EQ on success.
 * The store happens only when EA matches the current reservation. */
static void gen_stwcx_(DisasContext *ctx)
{
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_RES);
    t0 = tcg_temp_local_new();
    gen_addr_reg_index(ctx, t0);
    gen_check_align(ctx, t0, 0x03);
#if defined(CONFIG_USER_ONLY)
    gen_conditional_store(ctx, t0, rS(ctx->opcode), 4);
#else
    {
        int l1;

        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
        tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
        tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
        l1 = gen_new_label();
        tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
        gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], t0);
        gen_set_label(l1);
        tcg_gen_movi_tl(cpu_reserve, -1);
    }
#endif
    tcg_temp_free(t0);
}
#if defined(TARGET_PPC64)
/* ldarx: doubleword variant of lwarx (8-byte alignment). */
static void gen_ldarx(DisasContext *ctx)
{
    TCGv t0;
    TCGv gpr = cpu_gpr[rD(ctx->opcode)];
    gen_set_access_type(ctx, ACCESS_RES);
    t0 = tcg_temp_local_new();
    gen_addr_reg_index(ctx, t0);
    gen_check_align(ctx, t0, 0x07);
    gen_qemu_ld64(ctx, gpr, t0);
    tcg_gen_mov_tl(cpu_reserve, t0);
    tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUState, reserve_val));
    tcg_temp_free(t0);
}

/* stdcx.: doubleword variant of stwcx. */
static void gen_stdcx_(DisasContext *ctx)
{
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_RES);
    t0 = tcg_temp_local_new();
    gen_addr_reg_index(ctx, t0);
    gen_check_align(ctx, t0, 0x07);
#if defined(CONFIG_USER_ONLY)
    gen_conditional_store(ctx, t0, rS(ctx->opcode), 8);
#else
    {
        int l1;
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
        tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
        tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
        l1 = gen_new_label();
        tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
        gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], t0);
        gen_set_label(l1);
        tcg_gen_movi_tl(cpu_reserve, -1);
    }
#endif
    tcg_temp_free(t0);
}
#endif /* defined(TARGET_PPC64) */
/* sync: no-op under TCG's in-order memory model. */
static void gen_sync(DisasContext *ctx)
{
}

/* wait: halt the CPU until the next interrupt.
 * Fix: the original stored an *uninitialized* temp into env->halted;
 * store the constant 1 as upstream later corrected. */
static void gen_wait(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(1);
    tcg_gen_st_i32(t0, cpu_env, offsetof(CPUState, halted));
    tcg_temp_free_i32(t0);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_err(ctx, EXCP_HLT, 1);
}
/*** Floating-point load ***/
/* FP load, D-form; faults with an FPU-unavailable exception if the FPU
 * is disabled. */
#define GEN_LDF(name, ldop, opc, type)                                        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_FLOAT);                                   \
    EA = tcg_temp_new();                                                      \
    gen_addr_imm_index(ctx, EA, 0);                                           \
    gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

/* FP load with update; rA == 0 is invalid. */
#define GEN_LDUF(name, ldop, opc, type)                                       \
static void glue(gen_, name##u)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_FLOAT);                                   \
    EA = tcg_temp_new();                                                      \
    gen_addr_imm_index(ctx, EA, 0);                                           \
    gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA);                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

/* FP indexed load with update. */
#define GEN_LDUXF(name, ldop, opc, type)                                      \
static void glue(gen_, name##ux)(DisasContext *ctx)                           \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_FLOAT);                                   \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA);                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

/* FP indexed load. */
#define GEN_LDXF(name, ldop, opc2, opc3, type)                                \
static void glue(gen_, name##x)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_FLOAT);                                   \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

/* Expand the four FP load variants. */
#define GEN_LDFS(name, ldop, op, type)                                        \
GEN_LDF(name, ldop, op | 0x20, type);                                         \
GEN_LDUF(name, ldop, op | 0x21, type);                                        \
GEN_LDUXF(name, ldop, op | 0x01, type);                                       \
GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
static inline void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3230
TCGv t0 = tcg_temp_new();
3231
TCGv_i32 t1 = tcg_temp_new_i32();
3232
gen_qemu_ld32u(ctx, t0, arg2);
3233
tcg_gen_trunc_tl_i32(t1, t0);
3235
gen_helper_float32_to_float64(arg1, t1);
3236
tcg_temp_free_i32(t1);
3239
/* lfd lfdu lfdux lfdx */
3240
GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT);
3241
/* lfs lfsu lfsux lfsx */
3242
GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);
3244
/*** Floating-point store ***/
/* FP store, D-form; faults if the FPU is disabled. */
#define GEN_STF(name, stop, opc, type)                                        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_FLOAT);                                   \
    EA = tcg_temp_new();                                                      \
    gen_addr_imm_index(ctx, EA, 0);                                           \
    gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

/* FP store with update; rA == 0 is invalid. */
#define GEN_STUF(name, stop, opc, type)                                       \
static void glue(gen_, name##u)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_FLOAT);                                   \
    EA = tcg_temp_new();                                                      \
    gen_addr_imm_index(ctx, EA, 0);                                           \
    gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA);                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

/* FP indexed store with update. */
#define GEN_STUXF(name, stop, opc, type)                                      \
static void glue(gen_, name##ux)(DisasContext *ctx)                           \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_FLOAT);                                   \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA);                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

/* FP indexed store. */
#define GEN_STXF(name, stop, opc2, opc3, type)                                \
static void glue(gen_, name##x)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_FLOAT);                                   \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

/* Expand the four FP store variants. */
#define GEN_STFS(name, stop, op, type)                                        \
GEN_STF(name, stop, op | 0x20, type);                                         \
GEN_STUF(name, stop, op | 0x21, type);                                        \
GEN_STUXF(name, stop, op | 0x01, type);                                       \
GEN_STXF(name, stop, 0x17, op | 0x00, type)
static inline void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3323
TCGv_i32 t0 = tcg_temp_new_i32();
3324
TCGv t1 = tcg_temp_new();
3325
gen_helper_float64_to_float32(t0, arg1);
3326
tcg_gen_extu_i32_tl(t1, t0);
3327
tcg_temp_free_i32(t0);
3328
gen_qemu_st32(ctx, t1, arg2);
3332
/* stfd stfdu stfdux stfdx */
3333
GEN_STFS(stfd, st64, 0x16, PPC_FLOAT);
3334
/* stfs stfsu stfsux stfsx */
3335
GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);
3338
static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3340
TCGv t0 = tcg_temp_new();
3341
tcg_gen_trunc_i64_tl(t0, arg1),
3342
gen_qemu_st32(ctx, t0, arg2);
3346
GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
3349
/* Emit a direct or indirect jump to dest, chaining TBs when the target
 * stays on the same page and single-stepping is off. */
static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    TranslationBlock *tb;
    tb = ctx->tb;
#if defined(TARGET_PPC64)
    if (!ctx->sf_mode)
        dest = (uint32_t) dest;
#endif
    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        likely(!ctx->singlestep_enabled)) {
        tcg_gen_goto_tb(n);
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        tcg_gen_exit_tb((tcg_target_long)tb + n);
    } else {
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        if (unlikely(ctx->singlestep_enabled)) {
            if ((ctx->singlestep_enabled &
                (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
                ctx->exception == POWERPC_EXCP_BRANCH) {
                target_ulong tmp = ctx->nip;
                /* Raise the trace exception as if we were at the target */
                ctx->nip = dest;
                gen_exception(ctx, POWERPC_EXCP_TRACE);
                ctx->nip = tmp;
            }
            if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
                gen_debug_exception(ctx);
            }
        }
        tcg_gen_exit_tb(0);
    }
}
/* Set LR to nip, truncated to 32 bits when not in 64-bit mode. */
static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
{
#if defined(TARGET_PPC64)
    if (ctx->sf_mode == 0)
        tcg_gen_movi_tl(cpu_lr, (uint32_t)nip);
    else
#endif
        tcg_gen_movi_tl(cpu_lr, nip);
}
/* b ba bl bla */
static void gen_b(DisasContext *ctx)
{
    target_ulong li, target;

    ctx->exception = POWERPC_EXCP_BRANCH;
    /* sign extend LI */
#if defined(TARGET_PPC64)
    if (ctx->sf_mode)
        li = ((int64_t)LI(ctx->opcode) << 38) >> 38;
    else
#endif
        li = ((int32_t)LI(ctx->opcode) << 6) >> 6;
    if (likely(AA(ctx->opcode) == 0))
        target = ctx->nip + li - 4;
    else
        target = li;
    if (LK(ctx->opcode))
        gen_setlr(ctx, ctx->nip);
    gen_goto_tb(ctx, 0, target);
}
/* Common code for bc, bclr and bcctr: evaluate the CTR and/or CR
 * conditions (BO/BI fields) and branch to the immediate target, LR or
 * CTR.  NOTE(review): reconstructed from upstream; behavior matches the
 * surviving fragments — verify against the repository history. */
static inline void gen_bcond(DisasContext *ctx, int type)
{
    uint32_t bo = BO(ctx->opcode);
    int l1;
    TCGv target;

    ctx->exception = POWERPC_EXCP_BRANCH;
    if (type == BCOND_LR || type == BCOND_CTR) {
        target = tcg_temp_local_new();
        if (type == BCOND_CTR)
            tcg_gen_mov_tl(target, cpu_ctr);
        else
            tcg_gen_mov_tl(target, cpu_lr);
    } else {
        TCGV_UNUSED(target);
    }
    if (LK(ctx->opcode))
        gen_setlr(ctx, ctx->nip);
    l1 = gen_new_label();
    if ((bo & 0x4) == 0) {
        /* Decrement and test CTR */
        TCGv temp = tcg_temp_new();
        if (unlikely(type == BCOND_CTR)) {
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
            return;
        }
        tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
#if defined(TARGET_PPC64)
        if (!ctx->sf_mode)
            tcg_gen_ext32u_tl(temp, cpu_ctr);
        else
#endif
            tcg_gen_mov_tl(temp, cpu_ctr);
        if (bo & 0x2) {
            tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
        } else {
            tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
        }
        tcg_temp_free(temp);
    }
    if ((bo & 0x10) == 0) {
        /* Test CR */
        uint32_t bi = BI(ctx->opcode);
        uint32_t mask = 1 << (3 - (bi & 0x03));
        TCGv_i32 temp = tcg_temp_new_i32();

        if (bo & 0x8) {
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
            tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
        } else {
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
            tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
        }
        tcg_temp_free_i32(temp);
    }
    if (type == BCOND_IM) {
        target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
        if (likely(AA(ctx->opcode) == 0)) {
            gen_goto_tb(ctx, 0, ctx->nip + li - 4);
        } else {
            gen_goto_tb(ctx, 0, li);
        }
        gen_set_label(l1);
        gen_goto_tb(ctx, 1, ctx->nip);
    } else {
#if defined(TARGET_PPC64)
        if (!(ctx->sf_mode))
            tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
        else
#endif
            tcg_gen_andi_tl(cpu_nip, target, ~3);
        tcg_gen_exit_tb(0);
        gen_set_label(l1);
#if defined(TARGET_PPC64)
        if (!(ctx->sf_mode))
            tcg_gen_movi_tl(cpu_nip, (uint32_t)ctx->nip);
        else
#endif
            tcg_gen_movi_tl(cpu_nip, ctx->nip);
        tcg_gen_exit_tb(0);
    }
}
/* bc: conditional branch, immediate displacement. */
static void gen_bc(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_IM);
}

/* bcctr: conditional branch to CTR. */
static void gen_bcctr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_CTR);
}

/* bclr: conditional branch to LR. */
static void gen_bclr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_LR);
}
/*** Condition register logical ***/
/* CR-bit logical ops: align crbA and crbB to crbD's position within a
 * 4-bit CR field, apply the op, then merge the result bit into crfD. */
#define GEN_CRLOGIC(name, tcg_op, opc)                                        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    uint8_t bitmask;                                                          \
    int sh;                                                                   \
    TCGv_i32 t0, t1;                                                          \
    sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
    t0 = tcg_temp_new_i32();                                                  \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
    t1 = tcg_temp_new_i32();                                                  \
    sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
    tcg_op(t0, t0, t1);                                                       \
    bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03));                          \
    tcg_gen_andi_i32(t0, t0, bitmask);                                        \
    tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
    tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3550
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3552
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3554
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3556
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3558
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3560
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3562
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
3565
static void gen_mcrf(DisasContext *ctx)
3567
tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3570
/*** System linkage ***/
3572
/* rfi (mem_idx only) */
3573
static void gen_rfi(DisasContext *ctx)
3575
#if defined(CONFIG_USER_ONLY)
3576
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3578
/* Restore CPU state */
3579
if (unlikely(!ctx->mem_idx)) {
3580
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3584
gen_sync_exception(ctx);
3588
#if defined(TARGET_PPC64)
/* rfid: supervisor-only return from interrupt, 64-bit. */
static void gen_rfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Restore CPU state */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_rfid();
    gen_sync_exception(ctx);
#endif
}

/* hrfid: hypervisor return from interrupt (needs hypervisor privilege,
 * hence the mem_idx <= 1 check). */
static void gen_hrfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Restore CPU state */
    if (unlikely(ctx->mem_idx <= 1)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_hrfid();
    gen_sync_exception(ctx);
#endif
}
#endif
#if defined(CONFIG_USER_ONLY)
3622
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3624
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3626
static void gen_sc(DisasContext *ctx)
3630
lev = (ctx->opcode >> 5) & 0x7F;
3631
gen_exception_err(ctx, POWERPC_SYSCALL, lev);
3637
/* tw: trap word, register operands. */
static void gen_tw(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_helper_tw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
}

/* twi: trap word immediate. */
static void gen_twi(DisasContext *ctx)
{
    TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
    TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_helper_tw(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
#if defined(TARGET_PPC64)
/* td: trap doubleword, register operands. */
static void gen_td(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_helper_td(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
}

/* tdi: trap doubleword immediate. */
static void gen_tdi(DisasContext *ctx)
{
    TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
    TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_helper_td(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
#endif
/*** Processor control ***/
3685
static void gen_mcrxr(DisasContext *ctx)
3687
tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], cpu_xer);
3688
tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], XER_CA);
3689
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_SO | 1 << XER_OV | 1 << XER_CA));
3693
/* mfcr / mfocrf: with the OCRF bit set and a one-hot CRM, read a single
 * CR field; otherwise assemble all eight 4-bit fields into rD. */
static void gen_mfcr(DisasContext *ctx)
{
    uint32_t crm, crn;

    if (likely(ctx->opcode & 0x00100000)) {
        crm = CRM(ctx->opcode);
        if (likely(crm && ((crm & (crm - 1)) == 0))) {
            crn = ctz32(crm);  /* index of the single set bit */
            tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
            tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
                            cpu_gpr[rD(ctx->opcode)], crn * 4);
        }
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_mov_i32(t0, cpu_crf[0]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[1]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[2]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[3]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[4]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[5]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[6]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[7]);
        tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
        tcg_temp_free_i32(t0);
    }
}
/* mfmsr: supervisor-only read of the MSR. */
static void gen_mfmsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
#endif
}
/* Sentinel SPR callback: complains about an access to a forbidden SPR.
 * The shuffle un-swaps the two 5-bit halves of the encoded SPR number
 * so the printed value is the architectural one. */
static void spr_noaccess(void *opaque, int gprn, int sprn)
{
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: try to access SPR %d !\n", sprn);
}
#define SPR_NOACCESS (&spr_noaccess)
/* mfspr/mftb common code: pick the read callback for the current
 * privilege level (hypervisor/supervisor/user) and dispatch, raising
 * the appropriate exception for forbidden or undefined SPRs. */
static inline void gen_op_mfspr(DisasContext *ctx)
{
    void (*read_cb)(void *opaque, int gprn, int sprn);
    uint32_t sprn = SPR(ctx->opcode);

#if !defined(CONFIG_USER_ONLY)
    if (ctx->mem_idx == 2)
        read_cb = ctx->spr_cb[sprn].hea_read;
    else if (ctx->mem_idx)
        read_cb = ctx->spr_cb[sprn].oea_read;
    else
#endif
        read_cb = ctx->spr_cb[sprn].uea_read;
    if (likely(read_cb != NULL)) {
        if (likely(read_cb != SPR_NOACCESS)) {
            (*read_cb)(ctx, rD(ctx->opcode), sprn);
        } else {
            /* Privilege exception */
            /* This is a hack to avoid warnings when running Linux:
             * this OS breaks the PowerPC virtualisation model,
             * allowing userland application to read the PVR
             */
            if (sprn != SPR_PVR) {
                qemu_log("Trying to read privileged spr %d %03x at "
                         TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
                printf("Trying to read privileged spr %d %03x at "
                       TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
            }
            gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* Not defined */
        qemu_log("Trying to read invalid spr %d %03x at "
                 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
        printf("Trying to read invalid spr %d %03x at " TARGET_FMT_lx "\n",
               sprn, sprn, ctx->nip);
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
    }
}
/* mfspr */
static void gen_mfspr(DisasContext *ctx)
{
    gen_op_mfspr(ctx);
}

/* mftb: same path as mfspr; the time-base SPRs have their own callbacks. */
static void gen_mftb(DisasContext *ctx)
{
    gen_op_mfspr(ctx);
}
/* mtcrf / mtocrf: with the OCRF bit set and a one-hot CRM, write one CR
 * field; otherwise scatter rS's nibbles into the fields selected by CRM. */
static void gen_mtcrf(DisasContext *ctx)
{
    uint32_t crm, crn;

    crm = CRM(ctx->opcode);
    if (likely((ctx->opcode & 0x00100000))) {
        if (crm && ((crm & (crm - 1)) == 0)) {
            TCGv_i32 temp = tcg_temp_new_i32();
            crn = ctz32(crm);  /* index of the single set bit */
            tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
            tcg_gen_shri_i32(temp, temp, crn * 4);
            tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
            tcg_temp_free_i32(temp);
        }
    } else {
        TCGv_i32 temp = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
        for (crn = 0 ; crn < 8 ; crn++) {
            if (crm & (1 << crn)) {
                tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
                tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
            }
        }
        tcg_temp_free_i32(temp);
    }
}
#if defined(TARGET_PPC64)
/* mtmsrd: supervisor-only MSR write; the L=1 form touches only RI and
 * EE and needs no synchronisation, otherwise go through the helper and
 * end the TB since machine state may have changed. */
static void gen_mtmsrd(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        /* XXX: we need to update nip before the store
         *      if we enter power saving mode, we will exit the loop
         *      directly from ppc_store_msr
         */
        gen_update_nip(ctx, ctx->nip);
        gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
    }
#endif
}
#endif
/* mtmsr */
static void gen_mtmsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    /* Supervisor-only instruction */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation:
         * only MSR[RI] and MSR[EE] are updated */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        TCGv msr = tcg_temp_new();

        /* XXX: we need to update nip before the store
         *      if we enter power saving mode, we will exit the loop
         *      directly from ppc_store_msr
         */
        gen_update_nip(ctx, ctx->nip);
#if defined(TARGET_PPC64)
        /* 32-bit mtmsr only replaces the low 32 bits of the MSR */
        tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
#else
        tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
#endif
        gen_helper_store_msr(msr);
        tcg_temp_free(msr);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
    }
#endif
}
/* mtspr */
static void gen_mtspr(DisasContext *ctx)
{
    void (*write_cb)(void *opaque, int sprn, int gprn);
    uint32_t sprn = SPR(ctx->opcode);

    /* Pick the callback matching the current privilege level:
     * hypervisor (mem_idx == 2), supervisor, or user */
#if !defined(CONFIG_USER_ONLY)
    if (ctx->mem_idx == 2)
        write_cb = ctx->spr_cb[sprn].hea_write;
    else if (ctx->mem_idx)
        write_cb = ctx->spr_cb[sprn].oea_write;
    else
#endif
        write_cb = ctx->spr_cb[sprn].uea_write;
    if (likely(write_cb != NULL)) {
        if (likely(write_cb != SPR_NOACCESS)) {
            (*write_cb)(ctx, sprn, rS(ctx->opcode));
        } else {
            /* Privilege exception */
            qemu_log("Trying to write privileged spr %d %03x at "
                     TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
            printf("Trying to write privileged spr %d %03x at " TARGET_FMT_lx
                   "\n", sprn, sprn, ctx->nip);
            gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* Not defined */
        qemu_log("Trying to write invalid spr %d %03x at "
                 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
        printf("Trying to write invalid spr %d %03x at " TARGET_FMT_lx "\n",
               sprn, sprn, ctx->nip);
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
    }
}
/*** Cache management ***/
3938
static void gen_dcbf(DisasContext *ctx)
3940
/* XXX: specification says this is treated as a load by the MMU */
3942
gen_set_access_type(ctx, ACCESS_CACHE);
3943
t0 = tcg_temp_new();
3944
gen_addr_reg_index(ctx, t0);
3945
gen_qemu_ld8u(ctx, t0, t0);
3949
/* dcbi (Supervisor only) */
3950
static void gen_dcbi(DisasContext *ctx)
3952
#if defined(CONFIG_USER_ONLY)
3953
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3956
if (unlikely(!ctx->mem_idx)) {
3957
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3960
EA = tcg_temp_new();
3961
gen_set_access_type(ctx, ACCESS_CACHE);
3962
gen_addr_reg_index(ctx, EA);
3963
val = tcg_temp_new();
3964
/* XXX: specification says this should be treated as a store by the MMU */
3965
gen_qemu_ld8u(ctx, val, EA);
3966
gen_qemu_st8(ctx, val, EA);
3973
static void gen_dcbst(DisasContext *ctx)
3975
/* XXX: specification say this is treated as a load by the MMU */
3977
gen_set_access_type(ctx, ACCESS_CACHE);
3978
t0 = tcg_temp_new();
3979
gen_addr_reg_index(ctx, t0);
3980
gen_qemu_ld8u(ctx, t0, t0);
3985
static void gen_dcbt(DisasContext *ctx)
3987
/* interpreted as no-op */
3988
/* XXX: specification say this is treated as a load by the MMU
3989
* but does not generate any exception
3994
static void gen_dcbtst(DisasContext *ctx)
3996
/* interpreted as no-op */
3997
/* XXX: specification say this is treated as a load by the MMU
3998
* but does not generate any exception
4003
static void gen_dcbz(DisasContext *ctx)
4006
gen_set_access_type(ctx, ACCESS_CACHE);
4007
/* NIP cannot be restored if the memory exception comes from an helper */
4008
gen_update_nip(ctx, ctx->nip - 4);
4009
t0 = tcg_temp_new();
4010
gen_addr_reg_index(ctx, t0);
4011
gen_helper_dcbz(t0);
4015
static void gen_dcbz_970(DisasContext *ctx)
4018
gen_set_access_type(ctx, ACCESS_CACHE);
4019
/* NIP cannot be restored if the memory exception comes from an helper */
4020
gen_update_nip(ctx, ctx->nip - 4);
4021
t0 = tcg_temp_new();
4022
gen_addr_reg_index(ctx, t0);
4023
if (ctx->opcode & 0x00200000)
4024
gen_helper_dcbz(t0);
4026
gen_helper_dcbz_970(t0);
4031
static void gen_dst(DisasContext *ctx)
4033
if (rA(ctx->opcode) == 0) {
4034
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4036
/* interpreted as no-op */
4041
static void gen_dstst(DisasContext *ctx)
4043
if (rA(ctx->opcode) == 0) {
4044
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4046
/* interpreted as no-op */
4052
static void gen_dss(DisasContext *ctx)
4054
/* interpreted as no-op */
4058
static void gen_icbi(DisasContext *ctx)
4061
gen_set_access_type(ctx, ACCESS_CACHE);
4062
/* NIP cannot be restored if the memory exception comes from an helper */
4063
gen_update_nip(ctx, ctx->nip - 4);
4064
t0 = tcg_temp_new();
4065
gen_addr_reg_index(ctx, t0);
4066
gen_helper_icbi(t0);
4072
static void gen_dcba(DisasContext *ctx)
4074
/* interpreted as no-op */
4075
/* XXX: specification say this is treated as a store by the MMU
4076
* but does not generate any exception
4080
/*** Segment register manipulation ***/
4081
/* Supervisor only: */
4084
static void gen_mfsr(DisasContext *ctx)
4086
#if defined(CONFIG_USER_ONLY)
4087
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4090
if (unlikely(!ctx->mem_idx)) {
4091
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4094
t0 = tcg_const_tl(SR(ctx->opcode));
4095
gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4101
static void gen_mfsrin(DisasContext *ctx)
4103
#if defined(CONFIG_USER_ONLY)
4104
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4107
if (unlikely(!ctx->mem_idx)) {
4108
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4111
t0 = tcg_temp_new();
4112
tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4113
tcg_gen_andi_tl(t0, t0, 0xF);
4114
gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4120
static void gen_mtsr(DisasContext *ctx)
4122
#if defined(CONFIG_USER_ONLY)
4123
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4126
if (unlikely(!ctx->mem_idx)) {
4127
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4130
t0 = tcg_const_tl(SR(ctx->opcode));
4131
gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
4137
static void gen_mtsrin(DisasContext *ctx)
4139
#if defined(CONFIG_USER_ONLY)
4140
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4143
if (unlikely(!ctx->mem_idx)) {
4144
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4147
t0 = tcg_temp_new();
4148
tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4149
tcg_gen_andi_tl(t0, t0, 0xF);
4150
gen_helper_store_sr(t0, cpu_gpr[rD(ctx->opcode)]);
4155
#if defined(TARGET_PPC64)
/* Specific implementation for PowerPC 64 "bridge" emulation using SLB */

/* mfsr */
static void gen_mfsr_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#endif
}

/* mfsrin */
static void gen_mfsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#endif
}

/* mtsr */
static void gen_mtsr_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif
}

/* mtsrin */
static void gen_mtsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif
}

/* slbmte */
static void gen_slbmte(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    gen_helper_store_slb(cpu_gpr[rB(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#endif
}

static void gen_slbmfee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)],
                             cpu_gpr[rB(ctx->opcode)]);
#endif
}

static void gen_slbmfev(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)],
                             cpu_gpr[rB(ctx->opcode)]);
#endif
}
#endif /* defined(TARGET_PPC64) */
/*** Lookaside buffer management ***/
4274
/* Optional & mem_idx only: */
4277
static void gen_tlbia(DisasContext *ctx)
4279
#if defined(CONFIG_USER_ONLY)
4280
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4282
if (unlikely(!ctx->mem_idx)) {
4283
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4291
static void gen_tlbiel(DisasContext *ctx)
4293
#if defined(CONFIG_USER_ONLY)
4294
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4296
if (unlikely(!ctx->mem_idx)) {
4297
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4300
gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
4305
static void gen_tlbie(DisasContext *ctx)
4307
#if defined(CONFIG_USER_ONLY)
4308
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4310
if (unlikely(!ctx->mem_idx)) {
4311
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4314
#if defined(TARGET_PPC64)
4315
if (!ctx->sf_mode) {
4316
TCGv t0 = tcg_temp_new();
4317
tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
4318
gen_helper_tlbie(t0);
4322
gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
4327
static void gen_tlbsync(DisasContext *ctx)
4329
#if defined(CONFIG_USER_ONLY)
4330
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4332
if (unlikely(!ctx->mem_idx)) {
4333
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4336
/* This has no effect: it should ensure that all previous
4337
* tlbie have completed
4339
gen_stop_exception(ctx);
4343
#if defined(TARGET_PPC64)
/* slbia */
static void gen_slbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_slbia();
#endif
}

/* slbie */
static void gen_slbie(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_slbie(cpu_gpr[rB(ctx->opcode)]);
#endif
}
#endif
/*** External control ***/
4377
static void gen_eciwx(DisasContext *ctx)
4380
/* Should check EAR[E] ! */
4381
gen_set_access_type(ctx, ACCESS_EXT);
4382
t0 = tcg_temp_new();
4383
gen_addr_reg_index(ctx, t0);
4384
gen_check_align(ctx, t0, 0x03);
4385
gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4390
static void gen_ecowx(DisasContext *ctx)
4393
/* Should check EAR[E] ! */
4394
gen_set_access_type(ctx, ACCESS_EXT);
4395
t0 = tcg_temp_new();
4396
gen_addr_reg_index(ctx, t0);
4397
gen_check_align(ctx, t0, 0x03);
4398
gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4402
/* PowerPC 601 specific instructions */
4405
static void gen_abs(DisasContext *ctx)
4407
int l1 = gen_new_label();
4408
int l2 = gen_new_label();
4409
tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
4410
tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4413
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4415
if (unlikely(Rc(ctx->opcode) != 0))
4416
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4420
static void gen_abso(DisasContext *ctx)
4422
int l1 = gen_new_label();
4423
int l2 = gen_new_label();
4424
int l3 = gen_new_label();
4425
/* Start with XER OV disabled, the most likely case */
4426
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4427
tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
4428
tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
4429
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4432
tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4435
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4437
if (unlikely(Rc(ctx->opcode) != 0))
4438
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4442
static void gen_clcs(DisasContext *ctx)
4444
TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
4445
gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], t0);
4446
tcg_temp_free_i32(t0);
4447
/* Rc=1 sets CR0 to an undefined state */
4451
static void gen_div(DisasContext *ctx)
4453
gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4454
if (unlikely(Rc(ctx->opcode) != 0))
4455
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4459
static void gen_divo(DisasContext *ctx)
4461
gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4462
if (unlikely(Rc(ctx->opcode) != 0))
4463
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4467
static void gen_divs(DisasContext *ctx)
4469
gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4470
if (unlikely(Rc(ctx->opcode) != 0))
4471
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4474
/* divso - divso. */
4475
static void gen_divso(DisasContext *ctx)
4477
gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4478
if (unlikely(Rc(ctx->opcode) != 0))
4479
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4483
/* doz - doz. */
static void gen_doz(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    /* NOTE(review): branch layout (difference on the fall-through path,
     * zero on the taken path) preserved from the original source --
     * confirm polarity against the POWER doz definition. */
    tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
    tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* dozo - dozo. */
static void gen_dozo(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
    tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    /* Signed-overflow detection for the subtraction */
    tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* dozi */
static void gen_dozi(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    /* NOTE(review): same branch layout as gen_doz above -- confirm. */
    tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
    tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
/* lscbx - lscbx. */
4543
static void gen_lscbx(DisasContext *ctx)
4545
TCGv t0 = tcg_temp_new();
4546
TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
4547
TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
4548
TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
4550
gen_addr_reg_index(ctx, t0);
4551
/* NIP cannot be restored if the memory exception comes from an helper */
4552
gen_update_nip(ctx, ctx->nip - 4);
4553
gen_helper_lscbx(t0, t0, t1, t2, t3);
4554
tcg_temp_free_i32(t1);
4555
tcg_temp_free_i32(t2);
4556
tcg_temp_free_i32(t3);
4557
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
4558
tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
4559
if (unlikely(Rc(ctx->opcode) != 0))
4560
gen_set_Rc0(ctx, t0);
4564
/* maskg - maskg. */
4565
static void gen_maskg(DisasContext *ctx)
4567
int l1 = gen_new_label();
4568
TCGv t0 = tcg_temp_new();
4569
TCGv t1 = tcg_temp_new();
4570
TCGv t2 = tcg_temp_new();
4571
TCGv t3 = tcg_temp_new();
4572
tcg_gen_movi_tl(t3, 0xFFFFFFFF);
4573
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4574
tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
4575
tcg_gen_addi_tl(t2, t0, 1);
4576
tcg_gen_shr_tl(t2, t3, t2);
4577
tcg_gen_shr_tl(t3, t3, t1);
4578
tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
4579
tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
4580
tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4586
if (unlikely(Rc(ctx->opcode) != 0))
4587
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4590
/* maskir - maskir. */
4591
static void gen_maskir(DisasContext *ctx)
4593
TCGv t0 = tcg_temp_new();
4594
TCGv t1 = tcg_temp_new();
4595
tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4596
tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4597
tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4600
if (unlikely(Rc(ctx->opcode) != 0))
4601
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4605
/* mul - mul. */
static void gen_mul(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv t2 = tcg_temp_new();
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    /* Low 32 bits of the product go to MQ, high 32 bits to rD */
    tcg_gen_trunc_i64_tl(t2, t0);
    gen_store_spr(SPR_MQ, t2);
    tcg_gen_shri_i64(t1, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulo - mulo. */
static void gen_mulo(DisasContext *ctx)
{
    int l1 = gen_new_label();
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv t2 = tcg_temp_new();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_trunc_i64_tl(t2, t0);
    gen_store_spr(SPR_MQ, t2);
    tcg_gen_shri_i64(t1, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
    /* Overflow if the 64-bit product does not fit in 32 signed bits */
    tcg_gen_ext32s_i64(t1, t0);
    tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    gen_set_label(l1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
/* nabs - nabs. */
static void gen_nabs(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
    /* rA already non-positive: keep it */
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* nabso - nabso. */
static void gen_nabso(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    /* nabs never overflows */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
/* rlmi - rlmi. */
static void gen_rlmi(DisasContext *ctx)
{
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    /* Rotate rS left by rB[27:31] and insert under mask MB..ME */
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_andi_tl(t0, t0, MASK(mb, me));
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* rrib - rrib. */
static void gen_rrib(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    /* Insert bit 0 of rS, rotated right by rB[27:31], into rA */
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0x80000000);
    tcg_gen_shr_tl(t1, t1, t0);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_and_tl(t0, t0, t1);
    tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sle - sle. */
static void gen_sle(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    /* MQ receives the full rotation, rA the shifted value */
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sleq - sleq. */
static void gen_sleq(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t2, 0xFFFFFFFF);
    tcg_gen_shl_tl(t2, t2, t0);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    /* Merge the rotation with the previous MQ under the shift mask */
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_and_tl(t0, t0, t2);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sliq - sliq. */
static void gen_sliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    /* MQ receives the full rotation, rA the shifted value */
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* slliq - slliq. */
static void gen_slliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    /* Merge the rotation with the previous MQ under the shift mask */
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
    tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sllq - sllq. */
static void gen_sllq(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shl_tl(t1, t1, t2);
    /* Bit 26 of rB selects between MQ-only and merged result */
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    gen_load_spr(t0, SPR_MQ);
    tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    gen_load_spr(t2, SPR_MQ);
    tcg_gen_andc_tl(t1, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* slq - slq. */
static void gen_slq(DisasContext *ctx)
{
    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    gen_store_spr(SPR_MQ, t1);
    /* If bit 26 of rB is set the result is zero */
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sraiq - sraiq. */
4845
static void gen_sraiq(DisasContext *ctx)
4847
int sh = SH(ctx->opcode);
4848
int l1 = gen_new_label();
4849
TCGv t0 = tcg_temp_new();
4850
TCGv t1 = tcg_temp_new();
4851
tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4852
tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4853
tcg_gen_or_tl(t0, t0, t1);
4854
gen_store_spr(SPR_MQ, t0);
4855
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
4856
tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4857
tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
4858
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4860
tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
4863
if (unlikely(Rc(ctx->opcode) != 0))
4864
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4868
static void gen_sraq(DisasContext *ctx)
4870
int l1 = gen_new_label();
4871
int l2 = gen_new_label();
4872
TCGv t0 = tcg_temp_new();
4873
TCGv t1 = tcg_temp_local_new();
4874
TCGv t2 = tcg_temp_local_new();
4875
tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4876
tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4877
tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
4878
tcg_gen_subfi_tl(t2, 32, t2);
4879
tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
4880
tcg_gen_or_tl(t0, t0, t2);
4881
gen_store_spr(SPR_MQ, t0);
4882
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4883
tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1);
4884
tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
4885
tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
4888
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
4889
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
4890
tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4891
tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
4892
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4896
if (unlikely(Rc(ctx->opcode) != 0))
4897
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4901
/* sre - sre. */
static void gen_sre(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    /* MQ receives the full rotation, rA the shifted value */
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srea - srea. */
static void gen_srea(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sreq */
static void gen_sreq(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shr_tl(t1, t1, t0);
    tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    /* Merge the rotation with the previous MQ under the shift mask */
    gen_load_spr(t2, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_and_tl(t0, t0, t1);
    tcg_gen_andc_tl(t2, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sriq */
static void gen_sriq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    /* MQ receives the full rotation, rA the shifted value */
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srliq */
static void gen_srliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    /* Merge the rotation with the previous MQ under the shift mask */
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
    tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srlq */
static void gen_srlq(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shr_tl(t2, t1, t2);
    /* Bit 26 of rB selects between MQ-only and merged result */
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    gen_load_spr(t0, SPR_MQ);
    tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_and_tl(t0, t0, t2);
    gen_load_spr(t1, SPR_MQ);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srq */
static void gen_srq(DisasContext *ctx)
{
    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    gen_store_spr(SPR_MQ, t1);
    /* If bit 26 of rB is set the result is zero */
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    /* BUG FIX: branch must test t1 (rB & 0x20) -- the previous code
     * tested t0, the shift result, zeroing rA whenever the shifted
     * value happened to be non-zero.  Matches sibling gen_slq. */
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* PowerPC 602 specific instructions */
5046
static void gen_dsa(DisasContext *ctx)
5049
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5053
static void gen_esa(DisasContext *ctx)
5056
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5060
/* mfrom (PowerPC 602): privileged move-from-ROM helper.
 * In user-only builds, or when mem_idx is 0 (presumably user mode —
 * the file-wide privilege-check convention), raises a privileged-opcode
 * exception instead.
 */
static void gen_mfrom(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
#endif
}
5073
/* 602 - 603 - G2 TLB management */
5076
/* tlbld (602/603/G2): load data TLB entry; privileged. */
static void gen_tlbld_6xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_6xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
#endif
}
5090
/* tlbli (602/603/G2): load instruction TLB entry; privileged. */
static void gen_tlbli_6xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_6xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
#endif
}
5103
/* 74xx TLB management */
5106
static void gen_tlbld_74xx(DisasContext *ctx)
5108
#if defined(CONFIG_USER_ONLY)
5109
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5111
if (unlikely(!ctx->mem_idx)) {
5112
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5115
gen_helper_74xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
5120
static void gen_tlbli_74xx(DisasContext *ctx)
5122
#if defined(CONFIG_USER_ONLY)
5123
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5125
if (unlikely(!ctx->mem_idx)) {
5126
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5129
gen_helper_74xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5133
/* POWER instructions not in PowerPC 601 */
5136
static void gen_clf(DisasContext *ctx)
5138
/* Cache line flush: implemented as no-op */
5142
static void gen_cli(DisasContext *ctx)
5144
/* Cache line invalidate: privileged and treated as no-op */
5145
#if defined(CONFIG_USER_ONLY)
5146
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5148
if (unlikely(!ctx->mem_idx)) {
5149
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5156
static void gen_dclst(DisasContext *ctx)
5158
/* Data cache line store: treated as no-op */
5161
static void gen_mfsri(DisasContext *ctx)
5163
#if defined(CONFIG_USER_ONLY)
5164
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5166
int ra = rA(ctx->opcode);
5167
int rd = rD(ctx->opcode);
5169
if (unlikely(!ctx->mem_idx)) {
5170
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5173
t0 = tcg_temp_new();
5174
gen_addr_reg_index(ctx, t0);
5175
tcg_gen_shri_tl(t0, t0, 28);
5176
tcg_gen_andi_tl(t0, t0, 0xF);
5177
gen_helper_load_sr(cpu_gpr[rd], t0);
5179
if (ra != 0 && ra != rd)
5180
tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
5184
/* rac (POWER): real address compute; privileged.
 * Computes the effective address and hands it to the rac helper,
 * which writes the translation result into rD.
 */
static void gen_rac(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_rac(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#endif
}
5201
static void gen_rfsvc(DisasContext *ctx)
5203
#if defined(CONFIG_USER_ONLY)
5204
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5206
if (unlikely(!ctx->mem_idx)) {
5207
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5211
gen_sync_exception(ctx);
5215
/* svc is not implemented for now */
5217
/* POWER2 specific instructions */
5218
/* Quad manipulation (load/store two floats at a time) */
5221
/* lfq (POWER2): load two consecutive FPRs (rD, rD+1 mod 32) from
 * EA and EA+8 using the D-form immediate address.
 */
static void gen_lfq(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
    tcg_temp_free(t0);
}
5235
static void gen_lfqu(DisasContext *ctx)
5237
int ra = rA(ctx->opcode);
5238
int rd = rD(ctx->opcode);
5240
gen_set_access_type(ctx, ACCESS_FLOAT);
5241
t0 = tcg_temp_new();
5242
t1 = tcg_temp_new();
5243
gen_addr_imm_index(ctx, t0, 0);
5244
gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5245
gen_addr_add(ctx, t1, t0, 8);
5246
gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5248
tcg_gen_mov_tl(cpu_gpr[ra], t0);
5254
static void gen_lfqux(DisasContext *ctx)
5256
int ra = rA(ctx->opcode);
5257
int rd = rD(ctx->opcode);
5258
gen_set_access_type(ctx, ACCESS_FLOAT);
5260
t0 = tcg_temp_new();
5261
gen_addr_reg_index(ctx, t0);
5262
gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5263
t1 = tcg_temp_new();
5264
gen_addr_add(ctx, t1, t0, 8);
5265
gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5268
tcg_gen_mov_tl(cpu_gpr[ra], t0);
5273
static void gen_lfqx(DisasContext *ctx)
5275
int rd = rD(ctx->opcode);
5277
gen_set_access_type(ctx, ACCESS_FLOAT);
5278
t0 = tcg_temp_new();
5279
gen_addr_reg_index(ctx, t0);
5280
gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5281
gen_addr_add(ctx, t0, t0, 8);
5282
gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5287
/* stfq (POWER2): store two consecutive FPRs (rD, rD+1 mod 32) to
 * EA and EA+8 using the D-form immediate address.
 */
static void gen_stfq(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_st64(ctx, cpu_fpr[rd], t0);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
    tcg_temp_free(t0);
}
5301
static void gen_stfqu(DisasContext *ctx)
5303
int ra = rA(ctx->opcode);
5304
int rd = rD(ctx->opcode);
5306
gen_set_access_type(ctx, ACCESS_FLOAT);
5307
t0 = tcg_temp_new();
5308
gen_addr_imm_index(ctx, t0, 0);
5309
gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5310
t1 = tcg_temp_new();
5311
gen_addr_add(ctx, t1, t0, 8);
5312
gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5315
tcg_gen_mov_tl(cpu_gpr[ra], t0);
5320
static void gen_stfqux(DisasContext *ctx)
5322
int ra = rA(ctx->opcode);
5323
int rd = rD(ctx->opcode);
5325
gen_set_access_type(ctx, ACCESS_FLOAT);
5326
t0 = tcg_temp_new();
5327
gen_addr_reg_index(ctx, t0);
5328
gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5329
t1 = tcg_temp_new();
5330
gen_addr_add(ctx, t1, t0, 8);
5331
gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5334
tcg_gen_mov_tl(cpu_gpr[ra], t0);
5339
static void gen_stfqx(DisasContext *ctx)
5341
int rd = rD(ctx->opcode);
5343
gen_set_access_type(ctx, ACCESS_FLOAT);
5344
t0 = tcg_temp_new();
5345
gen_addr_reg_index(ctx, t0);
5346
gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5347
gen_addr_add(ctx, t0, t0, 8);
5348
gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5352
/* BookE specific instructions */
5354
/* XXX: not implemented on 440 ? */
5355
static void gen_mfapidi(DisasContext *ctx)
5358
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5361
/* XXX: not implemented on 440 ? */
5362
/* tlbiva (BookE): TLB invalidate virtual address; privileged. */
static void gen_tlbiva(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* NOTE(review): the computed EA in t0 is never used; the helper is
     * passed rB directly. Looks suspicious — confirm against the BookE
     * tlbiva semantics before changing. */
    gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(t0);
#endif
}
5379
/* All 405 MAC instructions are translated here */
5380
static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5381
int ra, int rb, int rt, int Rc)
5385
t0 = tcg_temp_local_new();
5386
t1 = tcg_temp_local_new();
5388
switch (opc3 & 0x0D) {
5390
/* macchw - macchw. - macchwo - macchwo. */
5391
/* macchws - macchws. - macchwso - macchwso. */
5392
/* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
5393
/* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
5394
/* mulchw - mulchw. */
5395
tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5396
tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5397
tcg_gen_ext16s_tl(t1, t1);
5400
/* macchwu - macchwu. - macchwuo - macchwuo. */
5401
/* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
5402
/* mulchwu - mulchwu. */
5403
tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5404
tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5405
tcg_gen_ext16u_tl(t1, t1);
5408
/* machhw - machhw. - machhwo - machhwo. */
5409
/* machhws - machhws. - machhwso - machhwso. */
5410
/* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
5411
/* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
5412
/* mulhhw - mulhhw. */
5413
tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5414
tcg_gen_ext16s_tl(t0, t0);
5415
tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5416
tcg_gen_ext16s_tl(t1, t1);
5419
/* machhwu - machhwu. - machhwuo - machhwuo. */
5420
/* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
5421
/* mulhhwu - mulhhwu. */
5422
tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5423
tcg_gen_ext16u_tl(t0, t0);
5424
tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5425
tcg_gen_ext16u_tl(t1, t1);
5428
/* maclhw - maclhw. - maclhwo - maclhwo. */
5429
/* maclhws - maclhws. - maclhwso - maclhwso. */
5430
/* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
5431
/* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
5432
/* mullhw - mullhw. */
5433
tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5434
tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5437
/* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
5438
/* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
5439
/* mullhwu - mullhwu. */
5440
tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5441
tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5445
/* (n)multiply-and-accumulate (0x0C / 0x0E) */
5446
tcg_gen_mul_tl(t1, t0, t1);
5448
/* nmultiply-and-accumulate (0x0E) */
5449
tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5451
/* multiply-and-accumulate (0x0C) */
5452
tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5456
/* Check overflow and/or saturate */
5457
int l1 = gen_new_label();
5460
/* Start with XER OV disabled, the most likely case */
5461
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
5465
tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5466
tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5467
tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5468
tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5471
tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5472
tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5476
tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5479
tcg_gen_movi_tl(t0, UINT32_MAX);
5483
/* Check overflow */
5484
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
5487
tcg_gen_mov_tl(cpu_gpr[rt], t0);
5490
tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5494
if (unlikely(Rc) != 0) {
5496
gen_set_Rc0(ctx, cpu_gpr[rt]);
5500
/* Stamp out one translator per 405 MAC/MUL opcode; all of them funnel
 * into gen_405_mulladd_insn with the decode fields split out. */
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
                         rD(ctx->opcode), Rc(ctx->opcode));                   \
}
5507
/* macchw - macchw. */
5508
GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5509
/* macchwo - macchwo. */
5510
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5511
/* macchws - macchws. */
5512
GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5513
/* macchwso - macchwso. */
5514
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5515
/* macchwsu - macchwsu. */
5516
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5517
/* macchwsuo - macchwsuo. */
5518
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5519
/* macchwu - macchwu. */
5520
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5521
/* macchwuo - macchwuo. */
5522
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5523
/* machhw - machhw. */
5524
GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5525
/* machhwo - machhwo. */
5526
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5527
/* machhws - machhws. */
5528
GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5529
/* machhwso - machhwso. */
5530
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5531
/* machhwsu - machhwsu. */
5532
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5533
/* machhwsuo - machhwsuo. */
5534
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5535
/* machhwu - machhwu. */
5536
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5537
/* machhwuo - machhwuo. */
5538
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5539
/* maclhw - maclhw. */
5540
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5541
/* maclhwo - maclhwo. */
5542
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5543
/* maclhws - maclhws. */
5544
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5545
/* maclhwso - maclhwso. */
5546
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5547
/* maclhwu - maclhwu. */
5548
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5549
/* maclhwuo - maclhwuo. */
5550
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5551
/* maclhwsu - maclhwsu. */
5552
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5553
/* maclhwsuo - maclhwsuo. */
5554
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5555
/* nmacchw - nmacchw. */
5556
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5557
/* nmacchwo - nmacchwo. */
5558
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5559
/* nmacchws - nmacchws. */
5560
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5561
/* nmacchwso - nmacchwso. */
5562
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5563
/* nmachhw - nmachhw. */
5564
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5565
/* nmachhwo - nmachhwo. */
5566
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5567
/* nmachhws - nmachhws. */
5568
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5569
/* nmachhwso - nmachhwso. */
5570
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5571
/* nmaclhw - nmaclhw. */
5572
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5573
/* nmaclhwo - nmaclhwo. */
5574
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5575
/* nmaclhws - nmaclhws. */
5576
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5577
/* nmaclhwso - nmaclhwso. */
5578
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5580
/* mulchw - mulchw. */
5581
GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5582
/* mulchwu - mulchwu. */
5583
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5584
/* mulhhw - mulhhw. */
5585
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5586
/* mulhhwu - mulhhwu. */
5587
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5588
/* mullhw - mullhw. */
5589
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5590
/* mullhwu - mullhwu. */
5591
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5594
/* mfdcr (4xx/BookE): move from device control register; privileged. */
static void gen_mfdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv dcrn;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], dcrn);
    tcg_temp_free(dcrn);
#endif
}
5613
/* mtdcr (4xx/BookE): move to device control register; privileged. */
static void gen_mtdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv dcrn;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_store_dcr(dcrn, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(dcrn);
#endif
}
5632
/* XXX: not implemented on 440 ? */
5633
static void gen_mfdcrx(DisasContext *ctx)
5635
#if defined(CONFIG_USER_ONLY)
5636
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5638
if (unlikely(!ctx->mem_idx)) {
5639
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5642
/* NIP cannot be restored if the memory exception comes from an helper */
5643
gen_update_nip(ctx, ctx->nip - 4);
5644
gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5645
/* Note: Rc update flag set leads to undefined state of Rc0 */
5650
/* XXX: not implemented on 440 ? */
5651
static void gen_mtdcrx(DisasContext *ctx)
5653
#if defined(CONFIG_USER_ONLY)
5654
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5656
if (unlikely(!ctx->mem_idx)) {
5657
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5660
/* NIP cannot be restored if the memory exception comes from an helper */
5661
gen_update_nip(ctx, ctx->nip - 4);
5662
gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5663
/* Note: Rc update flag set leads to undefined state of Rc0 */
5667
/* mfdcrux (PPC 460) : user-mode access to DCR */
5668
static void gen_mfdcrux(DisasContext *ctx)
5670
/* NIP cannot be restored if the memory exception comes from an helper */
5671
gen_update_nip(ctx, ctx->nip - 4);
5672
gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5673
/* Note: Rc update flag set leads to undefined state of Rc0 */
5676
/* mtdcrux (PPC 460) : user-mode access to DCR */
5677
static void gen_mtdcrux(DisasContext *ctx)
5679
/* NIP cannot be restored if the memory exception comes from an helper */
5680
gen_update_nip(ctx, ctx->nip - 4);
5681
gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5682
/* Note: Rc update flag set leads to undefined state of Rc0 */
5686
static void gen_dccci(DisasContext *ctx)
5688
#if defined(CONFIG_USER_ONLY)
5689
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5691
if (unlikely(!ctx->mem_idx)) {
5692
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5695
/* interpreted as no-op */
5700
static void gen_dcread(DisasContext *ctx)
5702
#if defined(CONFIG_USER_ONLY)
5703
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5706
if (unlikely(!ctx->mem_idx)) {
5707
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5710
gen_set_access_type(ctx, ACCESS_CACHE);
5711
EA = tcg_temp_new();
5712
gen_addr_reg_index(ctx, EA);
5713
val = tcg_temp_new();
5714
gen_qemu_ld32u(ctx, val, EA);
5716
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5722
static void gen_icbt_40x(DisasContext *ctx)
5724
/* interpreted as no-op */
5725
/* XXX: specification say this is treated as a load by the MMU
5726
* but does not generate any exception
5731
static void gen_iccci(DisasContext *ctx)
5733
#if defined(CONFIG_USER_ONLY)
5734
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5736
if (unlikely(!ctx->mem_idx)) {
5737
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5740
/* interpreted as no-op */
5745
static void gen_icread(DisasContext *ctx)
5747
#if defined(CONFIG_USER_ONLY)
5748
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5750
if (unlikely(!ctx->mem_idx)) {
5751
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5754
/* interpreted as no-op */
5758
/* rfci (mem_idx only) */
5759
static void gen_rfci_40x(DisasContext *ctx)
5761
#if defined(CONFIG_USER_ONLY)
5762
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5764
if (unlikely(!ctx->mem_idx)) {
5765
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5768
/* Restore CPU state */
5769
gen_helper_40x_rfci();
5770
gen_sync_exception(ctx);
5774
static void gen_rfci(DisasContext *ctx)
5776
#if defined(CONFIG_USER_ONLY)
5777
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5779
if (unlikely(!ctx->mem_idx)) {
5780
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5783
/* Restore CPU state */
5785
gen_sync_exception(ctx);
5789
/* BookE specific */
5791
/* XXX: not implemented on 440 ? */
5792
static void gen_rfdi(DisasContext *ctx)
5794
#if defined(CONFIG_USER_ONLY)
5795
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5797
if (unlikely(!ctx->mem_idx)) {
5798
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5801
/* Restore CPU state */
5803
gen_sync_exception(ctx);
5807
/* XXX: not implemented on 440 ? */
5808
static void gen_rfmci(DisasContext *ctx)
5810
#if defined(CONFIG_USER_ONLY)
5811
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5813
if (unlikely(!ctx->mem_idx)) {
5814
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5817
/* Restore CPU state */
5819
gen_sync_exception(ctx);
5823
/* TLB management - PowerPC 405 implementation */
5826
static void gen_tlbre_40x(DisasContext *ctx)
5828
#if defined(CONFIG_USER_ONLY)
5829
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5831
if (unlikely(!ctx->mem_idx)) {
5832
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5835
switch (rB(ctx->opcode)) {
5837
gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5840
gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5843
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5849
/* tlbsx - tlbsx. */
5850
static void gen_tlbsx_40x(DisasContext *ctx)
5852
#if defined(CONFIG_USER_ONLY)
5853
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5856
if (unlikely(!ctx->mem_idx)) {
5857
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5860
t0 = tcg_temp_new();
5861
gen_addr_reg_index(ctx, t0);
5862
gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], t0);
5864
if (Rc(ctx->opcode)) {
5865
int l1 = gen_new_label();
5866
tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
5867
tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
5868
tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
5869
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5870
tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5877
static void gen_tlbwe_40x(DisasContext *ctx)
5879
#if defined(CONFIG_USER_ONLY)
5880
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5882
if (unlikely(!ctx->mem_idx)) {
5883
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5886
switch (rB(ctx->opcode)) {
5888
gen_helper_4xx_tlbwe_hi(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5891
gen_helper_4xx_tlbwe_lo(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5894
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5900
/* TLB management - PowerPC 440 implementation */
5903
static void gen_tlbre_440(DisasContext *ctx)
5905
#if defined(CONFIG_USER_ONLY)
5906
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5908
if (unlikely(!ctx->mem_idx)) {
5909
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5912
switch (rB(ctx->opcode)) {
5917
TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5918
gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], t0, cpu_gpr[rA(ctx->opcode)]);
5919
tcg_temp_free_i32(t0);
5923
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5929
/* tlbsx - tlbsx. */
5930
static void gen_tlbsx_440(DisasContext *ctx)
5932
#if defined(CONFIG_USER_ONLY)
5933
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5936
if (unlikely(!ctx->mem_idx)) {
5937
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5940
t0 = tcg_temp_new();
5941
gen_addr_reg_index(ctx, t0);
5942
gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], t0);
5944
if (Rc(ctx->opcode)) {
5945
int l1 = gen_new_label();
5946
tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
5947
tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
5948
tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
5949
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5950
tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5957
static void gen_tlbwe_440(DisasContext *ctx)
5959
#if defined(CONFIG_USER_ONLY)
5960
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5962
if (unlikely(!ctx->mem_idx)) {
5963
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5966
switch (rB(ctx->opcode)) {
5971
TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5972
gen_helper_440_tlbwe(t0, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5973
tcg_temp_free_i32(t0);
5977
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5983
/* TLB management - PowerPC BookE 2.06 implementation */
5986
static void gen_tlbre_booke206(DisasContext *ctx)
5988
#if defined(CONFIG_USER_ONLY)
5989
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5991
if (unlikely(!ctx->mem_idx)) {
5992
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5996
gen_helper_booke206_tlbre();
6000
/* tlbsx - tlbsx. */
6001
static void gen_tlbsx_booke206(DisasContext *ctx)
6003
#if defined(CONFIG_USER_ONLY)
6004
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6007
if (unlikely(!ctx->mem_idx)) {
6008
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6012
if (rA(ctx->opcode)) {
6013
t0 = tcg_temp_new();
6014
tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
6016
t0 = tcg_const_tl(0);
6019
tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
6020
gen_helper_booke206_tlbsx(t0);
6025
static void gen_tlbwe_booke206(DisasContext *ctx)
6027
#if defined(CONFIG_USER_ONLY)
6028
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6030
if (unlikely(!ctx->mem_idx)) {
6031
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6034
gen_helper_booke206_tlbwe();
6038
static void gen_tlbivax_booke206(DisasContext *ctx)
6040
#if defined(CONFIG_USER_ONLY)
6041
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6044
if (unlikely(!ctx->mem_idx)) {
6045
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6049
t0 = tcg_temp_new();
6050
gen_addr_reg_index(ctx, t0);
6052
gen_helper_booke206_tlbivax(t0);
6058
static void gen_wrtee(DisasContext *ctx)
6060
#if defined(CONFIG_USER_ONLY)
6061
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6064
if (unlikely(!ctx->mem_idx)) {
6065
gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6068
t0 = tcg_temp_new();
6069
tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
6070
tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6071
tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
6073
/* Stop translation to have a chance to raise an exception
6074
* if we just set msr_ee to 1
6076
gen_stop_exception(ctx);
6081
/* wrteei (BookE): write MSR[EE] from an immediate bit; privileged. */
static void gen_wrteei(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    if (ctx->opcode & 0x00008000) {
        tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
        /* Stop translation to have a chance to raise an exception */
        gen_stop_exception(ctx);
    } else {
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    }
#endif
}
6100
/* PowerPC 440 specific instructions */
6103
/* dlmzb (440): determine leftmost zero byte; the helper also handles
 * the optional Rc (record) form, selected by the constant passed in. */
static void gen_dlmzb(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
    gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                     cpu_gpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
}
6111
/* mbar replaces eieio on 440 */
6112
static void gen_mbar(DisasContext *ctx)
6114
/* interpreted as no-op */
6117
/* msync replaces sync on 440 */
6118
static void gen_msync(DisasContext *ctx)
6120
/* interpreted as no-op */
6124
static void gen_icbt_440(DisasContext *ctx)
6126
/* interpreted as no-op */
6127
/* XXX: specification say this is treated as a load by the MMU
6128
* but does not generate any exception
6132
/*** Altivec vector extension ***/
6133
/* Altivec registers moves */
6135
static inline TCGv_ptr gen_avr_ptr(int reg)
6137
TCGv_ptr r = tcg_temp_new_ptr();
6138
tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, avr[reg]));
6142
#define GEN_VR_LDX(name, opc2, opc3) \
6143
static void glue(gen_, name)(DisasContext *ctx) \
6146
if (unlikely(!ctx->altivec_enabled)) { \
6147
gen_exception(ctx, POWERPC_EXCP_VPU); \
6150
gen_set_access_type(ctx, ACCESS_INT); \
6151
EA = tcg_temp_new(); \
6152
gen_addr_reg_index(ctx, EA); \
6153
tcg_gen_andi_tl(EA, EA, ~0xf); \
6154
if (ctx->le_mode) { \
6155
gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6156
tcg_gen_addi_tl(EA, EA, 8); \
6157
gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6159
gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6160
tcg_gen_addi_tl(EA, EA, 8); \
6161
gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6163
tcg_temp_free(EA); \
6166
#define GEN_VR_STX(name, opc2, opc3) \
6167
static void gen_st##name(DisasContext *ctx) \
6170
if (unlikely(!ctx->altivec_enabled)) { \
6171
gen_exception(ctx, POWERPC_EXCP_VPU); \
6174
gen_set_access_type(ctx, ACCESS_INT); \
6175
EA = tcg_temp_new(); \
6176
gen_addr_reg_index(ctx, EA); \
6177
tcg_gen_andi_tl(EA, EA, ~0xf); \
6178
if (ctx->le_mode) { \
6179
gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6180
tcg_gen_addi_tl(EA, EA, 8); \
6181
gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6183
gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6184
tcg_gen_addi_tl(EA, EA, 8); \
6185
gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6187
tcg_temp_free(EA); \
6190
#define GEN_VR_LVE(name, opc2, opc3) \
6191
static void gen_lve##name(DisasContext *ctx) \
6195
if (unlikely(!ctx->altivec_enabled)) { \
6196
gen_exception(ctx, POWERPC_EXCP_VPU); \
6199
gen_set_access_type(ctx, ACCESS_INT); \
6200
EA = tcg_temp_new(); \
6201
gen_addr_reg_index(ctx, EA); \
6202
rs = gen_avr_ptr(rS(ctx->opcode)); \
6203
gen_helper_lve##name (rs, EA); \
6204
tcg_temp_free(EA); \
6205
tcg_temp_free_ptr(rs); \
6208
#define GEN_VR_STVE(name, opc2, opc3) \
6209
static void gen_stve##name(DisasContext *ctx) \
6213
if (unlikely(!ctx->altivec_enabled)) { \
6214
gen_exception(ctx, POWERPC_EXCP_VPU); \
6217
gen_set_access_type(ctx, ACCESS_INT); \
6218
EA = tcg_temp_new(); \
6219
gen_addr_reg_index(ctx, EA); \
6220
rs = gen_avr_ptr(rS(ctx->opcode)); \
6221
gen_helper_stve##name (rs, EA); \
6222
tcg_temp_free(EA); \
6223
tcg_temp_free_ptr(rs); \
6226
GEN_VR_LDX(lvx, 0x07, 0x03);
6227
/* As we don't emulate the cache, lvxl is stricly equivalent to lvx */
6228
GEN_VR_LDX(lvxl, 0x07, 0x0B);
6230
GEN_VR_LVE(bx, 0x07, 0x00);
6231
GEN_VR_LVE(hx, 0x07, 0x01);
6232
GEN_VR_LVE(wx, 0x07, 0x02);
6234
GEN_VR_STX(svx, 0x07, 0x07);
6235
/* As we don't emulate the cache, stvxl is stricly equivalent to stvx */
6236
GEN_VR_STX(svxl, 0x07, 0x0F);
6238
GEN_VR_STVE(bx, 0x07, 0x04);
6239
GEN_VR_STVE(hx, 0x07, 0x05);
6240
GEN_VR_STVE(wx, 0x07, 0x06);
6242
static void gen_lvsl(DisasContext *ctx)
6246
if (unlikely(!ctx->altivec_enabled)) {
6247
gen_exception(ctx, POWERPC_EXCP_VPU);
6250
EA = tcg_temp_new();
6251
gen_addr_reg_index(ctx, EA);
6252
rd = gen_avr_ptr(rD(ctx->opcode));
6253
gen_helper_lvsl(rd, EA);
6255
tcg_temp_free_ptr(rd);
6258
static void gen_lvsr(DisasContext *ctx)
6262
if (unlikely(!ctx->altivec_enabled)) {
6263
gen_exception(ctx, POWERPC_EXCP_VPU);
6266
EA = tcg_temp_new();
6267
gen_addr_reg_index(ctx, EA);
6268
rd = gen_avr_ptr(rD(ctx->opcode));
6269
gen_helper_lvsr(rd, EA);
6271
tcg_temp_free_ptr(rd);
6274
/* mfvscr: move VSCR into the low half of vD, zeroing the high half.
 * Raises a VPU-unavailable exception when Altivec is disabled. */
static void gen_mfvscr(DisasContext *ctx)
{
    TCGv_i32 t;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    tcg_gen_movi_i64(cpu_avrh[rD(ctx->opcode)], 0);
    t = tcg_temp_new_i32();
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, vscr));
    tcg_gen_extu_i32_i64(cpu_avrl[rD(ctx->opcode)], t);
    tcg_temp_free_i32(t);
}
6288
static void gen_mtvscr(DisasContext *ctx)
6291
if (unlikely(!ctx->altivec_enabled)) {
6292
gen_exception(ctx, POWERPC_EXCP_VPU);
6295
p = gen_avr_ptr(rD(ctx->opcode));
6296
gen_helper_mtvscr(p);
6297
tcg_temp_free_ptr(p);
6300
/* Logical operations */
6301
#define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
6302
static void glue(gen_, name)(DisasContext *ctx) \
6304
if (unlikely(!ctx->altivec_enabled)) { \
6305
gen_exception(ctx, POWERPC_EXCP_VPU); \
6308
tcg_op(cpu_avrh[rD(ctx->opcode)], cpu_avrh[rA(ctx->opcode)], cpu_avrh[rB(ctx->opcode)]); \
6309
tcg_op(cpu_avrl[rD(ctx->opcode)], cpu_avrl[rA(ctx->opcode)], cpu_avrl[rB(ctx->opcode)]); \
6312
GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16);
6313
GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17);
6314
GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18);
6315
GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19);
6316
GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20);
6318
/* Stamp out one translator for a VX-form Altivec op: guard on
 * altivec_enabled, build AVR pointers for vA/vB/vD, call the helper. */
#define GEN_VXFORM(name, opc2, opc3)                                    \
static void glue(gen_, name)(DisasContext *ctx)                         \
{                                                                       \
    TCGv_ptr ra, rb, rd;                                                \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        gen_exception(ctx, POWERPC_EXCP_VPU);                           \
        return;                                                         \
    }                                                                   \
    ra = gen_avr_ptr(rA(ctx->opcode));                                  \
    rb = gen_avr_ptr(rB(ctx->opcode));                                  \
    rd = gen_avr_ptr(rD(ctx->opcode));                                  \
    gen_helper_##name (rd, ra, rb);                                     \
    tcg_temp_free_ptr(ra);                                              \
    tcg_temp_free_ptr(rb);                                              \
    tcg_temp_free_ptr(rd);                                              \
}
6335
GEN_VXFORM(vaddubm, 0, 0);
6336
GEN_VXFORM(vadduhm, 0, 1);
6337
GEN_VXFORM(vadduwm, 0, 2);
6338
GEN_VXFORM(vsububm, 0, 16);
6339
GEN_VXFORM(vsubuhm, 0, 17);
6340
GEN_VXFORM(vsubuwm, 0, 18);
6341
GEN_VXFORM(vmaxub, 1, 0);
6342
GEN_VXFORM(vmaxuh, 1, 1);
6343
GEN_VXFORM(vmaxuw, 1, 2);
6344
GEN_VXFORM(vmaxsb, 1, 4);
6345
GEN_VXFORM(vmaxsh, 1, 5);
6346
GEN_VXFORM(vmaxsw, 1, 6);
6347
GEN_VXFORM(vminub, 1, 8);
6348
GEN_VXFORM(vminuh, 1, 9);
6349
GEN_VXFORM(vminuw, 1, 10);
6350
GEN_VXFORM(vminsb, 1, 12);
6351
GEN_VXFORM(vminsh, 1, 13);
6352
GEN_VXFORM(vminsw, 1, 14);
6353
GEN_VXFORM(vavgub, 1, 16);
6354
GEN_VXFORM(vavguh, 1, 17);
6355
GEN_VXFORM(vavguw, 1, 18);
6356
GEN_VXFORM(vavgsb, 1, 20);
6357
GEN_VXFORM(vavgsh, 1, 21);
6358
GEN_VXFORM(vavgsw, 1, 22);
6359
GEN_VXFORM(vmrghb, 6, 0);
6360
GEN_VXFORM(vmrghh, 6, 1);
6361
GEN_VXFORM(vmrghw, 6, 2);
6362
GEN_VXFORM(vmrglb, 6, 4);
6363
GEN_VXFORM(vmrglh, 6, 5);
6364
GEN_VXFORM(vmrglw, 6, 6);
6365
GEN_VXFORM(vmuloub, 4, 0);
6366
GEN_VXFORM(vmulouh, 4, 1);
6367
GEN_VXFORM(vmulosb, 4, 4);
6368
GEN_VXFORM(vmulosh, 4, 5);
6369
GEN_VXFORM(vmuleub, 4, 8);
6370
GEN_VXFORM(vmuleuh, 4, 9);
6371
GEN_VXFORM(vmulesb, 4, 12);
6372
GEN_VXFORM(vmulesh, 4, 13);
6373
GEN_VXFORM(vslb, 2, 4);
6374
GEN_VXFORM(vslh, 2, 5);
6375
GEN_VXFORM(vslw, 2, 6);
6376
GEN_VXFORM(vsrb, 2, 8);
6377
GEN_VXFORM(vsrh, 2, 9);
6378
GEN_VXFORM(vsrw, 2, 10);
6379
GEN_VXFORM(vsrab, 2, 12);
6380
GEN_VXFORM(vsrah, 2, 13);
6381
GEN_VXFORM(vsraw, 2, 14);
6382
GEN_VXFORM(vslo, 6, 16);
6383
GEN_VXFORM(vsro, 6, 17);
6384
GEN_VXFORM(vaddcuw, 0, 6);
6385
GEN_VXFORM(vsubcuw, 0, 22);
6386
GEN_VXFORM(vaddubs, 0, 8);
6387
GEN_VXFORM(vadduhs, 0, 9);
6388
GEN_VXFORM(vadduws, 0, 10);
6389
GEN_VXFORM(vaddsbs, 0, 12);
6390
GEN_VXFORM(vaddshs, 0, 13);
6391
GEN_VXFORM(vaddsws, 0, 14);
6392
GEN_VXFORM(vsububs, 0, 24);
6393
GEN_VXFORM(vsubuhs, 0, 25);
6394
GEN_VXFORM(vsubuws, 0, 26);
6395
GEN_VXFORM(vsubsbs, 0, 28);
6396
GEN_VXFORM(vsubshs, 0, 29);
6397
GEN_VXFORM(vsubsws, 0, 30);
6398
GEN_VXFORM(vrlb, 2, 0);
6399
GEN_VXFORM(vrlh, 2, 1);
6400
GEN_VXFORM(vrlw, 2, 2);
6401
GEN_VXFORM(vsl, 2, 7);
6402
GEN_VXFORM(vsr, 2, 11);
6403
GEN_VXFORM(vpkuhum, 7, 0);
6404
GEN_VXFORM(vpkuwum, 7, 1);
6405
GEN_VXFORM(vpkuhus, 7, 2);
6406
GEN_VXFORM(vpkuwus, 7, 3);
6407
GEN_VXFORM(vpkshus, 7, 4);
6408
GEN_VXFORM(vpkswus, 7, 5);
6409
GEN_VXFORM(vpkshss, 7, 6);
6410
GEN_VXFORM(vpkswss, 7, 7);
6411
GEN_VXFORM(vpkpx, 7, 12);
6412
GEN_VXFORM(vsum4ubs, 4, 24);
6413
GEN_VXFORM(vsum4sbs, 4, 28);
6414
GEN_VXFORM(vsum4shs, 4, 25);
6415
GEN_VXFORM(vsum2sws, 4, 26);
6416
GEN_VXFORM(vsumsws, 4, 30);
6417
GEN_VXFORM(vaddfp, 5, 0);
6418
GEN_VXFORM(vsubfp, 5, 1);
6419
GEN_VXFORM(vmaxfp, 5, 16);
6420
GEN_VXFORM(vminfp, 5, 17);
6422
/* VXR-form (vector compare) translator generator.  GEN_VXRFORM emits
 * both the plain form and the record ("dot") form, which sets CR6; the
 * dot form's opc3 has bit 4 set.  The helper receives AVR pointers for
 * rD, rA and rB and must raise a VPU exception when AltiVec is off. */
#define GEN_VXRFORM1(opname, name, str, opc2, opc3)                     \
static void glue(gen_, name)(DisasContext *ctx)                         \
    {                                                                   \
        TCGv_ptr ra, rb, rd;                                            \
        if (unlikely(!ctx->altivec_enabled)) {                          \
            gen_exception(ctx, POWERPC_EXCP_VPU);                       \
            return;                                                     \
        }                                                               \
        ra = gen_avr_ptr(rA(ctx->opcode));                              \
        rb = gen_avr_ptr(rB(ctx->opcode));                              \
        rd = gen_avr_ptr(rD(ctx->opcode));                              \
        gen_helper_##opname (rd, ra, rb);                               \
        tcg_temp_free_ptr(ra);                                          \
        tcg_temp_free_ptr(rb);                                          \
        tcg_temp_free_ptr(rd);                                          \
    }

#define GEN_VXRFORM(name, opc2, opc3)                                \
    GEN_VXRFORM1(name, name, #name, opc2, opc3)                      \
    GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
/* Vector compare instructions (integer and float), plain and dot forms. */
GEN_VXRFORM(vcmpequb, 3, 0)
GEN_VXRFORM(vcmpequh, 3, 1)
GEN_VXRFORM(vcmpequw, 3, 2)
GEN_VXRFORM(vcmpgtsb, 3, 12)
GEN_VXRFORM(vcmpgtsh, 3, 13)
GEN_VXRFORM(vcmpgtsw, 3, 14)
GEN_VXRFORM(vcmpgtub, 3, 8)
GEN_VXRFORM(vcmpgtuh, 3, 9)
GEN_VXRFORM(vcmpgtuw, 3, 10)
GEN_VXRFORM(vcmpeqfp, 3, 3)
GEN_VXRFORM(vcmpgefp, 3, 7)
GEN_VXRFORM(vcmpgtfp, 3, 11)
GEN_VXRFORM(vcmpbfp, 3, 15)
/* VX-form with a 5-bit signed immediate (vspltis*): the helper splats
 * the sign-extended immediate into rD. */
#define GEN_VXFORM_SIMM(name, opc2, opc3)                               \
static void glue(gen_, name)(DisasContext *ctx)                         \
    {                                                                   \
        TCGv_ptr rd;                                                    \
        TCGv_i32 simm;                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                          \
            gen_exception(ctx, POWERPC_EXCP_VPU);                       \
            return;                                                     \
        }                                                               \
        simm = tcg_const_i32(SIMM5(ctx->opcode));                       \
        rd = gen_avr_ptr(rD(ctx->opcode));                              \
        gen_helper_##name (rd, simm);                                   \
        tcg_temp_free_i32(simm);                                        \
        tcg_temp_free_ptr(rd);                                          \
    }
GEN_VXFORM_SIMM(vspltisb, 6, 12);
GEN_VXFORM_SIMM(vspltish, 6, 13);
GEN_VXFORM_SIMM(vspltisw, 6, 14);
/* VX-form with no rA operand (unpack, round, estimate ops): the helper
 * takes only (rD, rB) AVR pointers. */
#define GEN_VXFORM_NOA(name, opc2, opc3)                                \
static void glue(gen_, name)(DisasContext *ctx)                         \
    {                                                                   \
        TCGv_ptr rb, rd;                                                \
        if (unlikely(!ctx->altivec_enabled)) {                          \
            gen_exception(ctx, POWERPC_EXCP_VPU);                       \
            return;                                                     \
        }                                                               \
        rb = gen_avr_ptr(rB(ctx->opcode));                              \
        rd = gen_avr_ptr(rD(ctx->opcode));                              \
        gen_helper_##name (rd, rb);                                     \
        tcg_temp_free_ptr(rb);                                          \
        tcg_temp_free_ptr(rd);                                          \
    }
GEN_VXFORM_NOA(vupkhsb, 7, 8);
GEN_VXFORM_NOA(vupkhsh, 7, 9);
GEN_VXFORM_NOA(vupklsb, 7, 10);
GEN_VXFORM_NOA(vupklsh, 7, 11);
GEN_VXFORM_NOA(vupkhpx, 7, 13);
GEN_VXFORM_NOA(vupklpx, 7, 15);
GEN_VXFORM_NOA(vrefp, 5, 4);
GEN_VXFORM_NOA(vrsqrtefp, 5, 5);
GEN_VXFORM_NOA(vexptefp, 5, 6);
GEN_VXFORM_NOA(vlogefp, 5, 7);
GEN_VXFORM_NOA(vrfim, 5, 8);
GEN_VXFORM_NOA(vrfin, 5, 9);
GEN_VXFORM_NOA(vrfip, 5, 10);
GEN_VXFORM_NOA(vrfiz, 5, 11);
/* NOTE(review): this redefinition of GEN_VXFORM_SIMM is byte-identical
 * to the earlier one and therefore legal but redundant; it could be
 * removed (or #undef'd first) in a cleanup pass. */
#define GEN_VXFORM_SIMM(name, opc2, opc3)                               \
static void glue(gen_, name)(DisasContext *ctx)                         \
    {                                                                   \
        TCGv_ptr rd;                                                    \
        TCGv_i32 simm;                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                          \
            gen_exception(ctx, POWERPC_EXCP_VPU);                       \
            return;                                                     \
        }                                                               \
        simm = tcg_const_i32(SIMM5(ctx->opcode));                       \
        rd = gen_avr_ptr(rD(ctx->opcode));                              \
        gen_helper_##name (rd, simm);                                   \
        tcg_temp_free_i32(simm);                                        \
        tcg_temp_free_ptr(rd);                                          \
    }
/* VX-form with a 5-bit unsigned immediate (splat / convert ops): the
 * helper receives (rD, rB, uimm). */
#define GEN_VXFORM_UIMM(name, opc2, opc3)                               \
static void glue(gen_, name)(DisasContext *ctx)                         \
    {                                                                   \
        TCGv_ptr rb, rd;                                                \
        TCGv_i32 uimm;                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                          \
            gen_exception(ctx, POWERPC_EXCP_VPU);                       \
            return;                                                     \
        }                                                               \
        uimm = tcg_const_i32(UIMM5(ctx->opcode));                       \
        rb = gen_avr_ptr(rB(ctx->opcode));                              \
        rd = gen_avr_ptr(rD(ctx->opcode));                              \
        gen_helper_##name (rd, rb, uimm);                               \
        tcg_temp_free_i32(uimm);                                        \
        tcg_temp_free_ptr(rb);                                          \
        tcg_temp_free_ptr(rd);                                          \
    }
GEN_VXFORM_UIMM(vspltb, 6, 8);
GEN_VXFORM_UIMM(vsplth, 6, 9);
GEN_VXFORM_UIMM(vspltw, 6, 10);
GEN_VXFORM_UIMM(vcfux, 5, 12);
GEN_VXFORM_UIMM(vcfsx, 5, 13);
GEN_VXFORM_UIMM(vctuxs, 5, 14);
GEN_VXFORM_UIMM(vctsxs, 5, 15);
/* vsldoi: shift-left-double-by-octet-immediate; the 4-bit VSH field
 * selects the byte shift amount applied across rA:rB. */
static void gen_vsldoi(DisasContext *ctx)
{
    TCGv_ptr ra, rb, rd;
    TCGv_i32 sh;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    ra = gen_avr_ptr(rA(ctx->opcode));
    rb = gen_avr_ptr(rB(ctx->opcode));
    rd = gen_avr_ptr(rD(ctx->opcode));
    sh = tcg_const_i32(VSH(ctx->opcode));
    gen_helper_vsldoi (rd, ra, rb, sh);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);
    tcg_temp_free_ptr(rd);
    tcg_temp_free_i32(sh);
}
/* VA-form pair: two four-operand vector instructions share one opc2;
 * the Rc bit selects which helper (name1 when set, else name0). */
#define GEN_VAFORM_PAIRED(name0, name1, opc2)                           \
static void glue(gen_, name0##_##name1)(DisasContext *ctx)              \
    {                                                                   \
        TCGv_ptr ra, rb, rc, rd;                                        \
        if (unlikely(!ctx->altivec_enabled)) {                          \
            gen_exception(ctx, POWERPC_EXCP_VPU);                       \
            return;                                                     \
        }                                                               \
        ra = gen_avr_ptr(rA(ctx->opcode));                              \
        rb = gen_avr_ptr(rB(ctx->opcode));                              \
        rc = gen_avr_ptr(rC(ctx->opcode));                              \
        rd = gen_avr_ptr(rD(ctx->opcode));                              \
        if (Rc(ctx->opcode)) {                                          \
            gen_helper_##name1 (rd, ra, rb, rc);                        \
        } else {                                                        \
            gen_helper_##name0 (rd, ra, rb, rc);                        \
        }                                                               \
        tcg_temp_free_ptr(ra);                                          \
        tcg_temp_free_ptr(rb);                                          \
        tcg_temp_free_ptr(rc);                                          \
        tcg_temp_free_ptr(rd);                                          \
    }
GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16)
/* vmladduhm: multiply-low-add unsigned halfword modulo; four AVR
 * operands, no Rc pairing, hence a hand-written translator. */
static void gen_vmladduhm(DisasContext *ctx)
{
    TCGv_ptr ra, rb, rc, rd;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    ra = gen_avr_ptr(rA(ctx->opcode));
    rb = gen_avr_ptr(rB(ctx->opcode));
    rc = gen_avr_ptr(rC(ctx->opcode));
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_vmladduhm(rd, ra, rb, rc);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);
    tcg_temp_free_ptr(rc);
    tcg_temp_free_ptr(rd);
}
GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18)
GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19)
GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20)
GEN_VAFORM_PAIRED(vsel, vperm, 21)
GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23)
/*** SPE extension ***/
6618
/* Register moves */
6621
static inline void gen_evmra(DisasContext *ctx)
6624
if (unlikely(!ctx->spe_enabled)) {
6625
gen_exception(ctx, POWERPC_EXCP_APU);
6629
#if defined(TARGET_PPC64)
6631
tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6634
tcg_gen_st_i64(cpu_gpr[rA(ctx->opcode)],
6636
offsetof(CPUState, spe_acc));
6638
TCGv_i64 tmp = tcg_temp_new_i64();
6640
/* tmp := rA_lo + rA_hi << 32 */
6641
tcg_gen_concat_i32_i64(tmp, cpu_gpr[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6643
/* spe_acc := tmp */
6644
tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));
6645
tcg_temp_free_i64(tmp);
6648
tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6649
tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6653
/* Read a 64-bit SPE value for GPR 'reg' into t (lo | hi<<32 on 32-bit). */
static inline void gen_load_gpr64(TCGv_i64 t, int reg)
{
#if defined(TARGET_PPC64)
    tcg_gen_mov_i64(t, cpu_gpr[reg]);
#else
    tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
#endif
}
/* Write 64-bit value t to GPR 'reg' (split into lo/hi halves on 32-bit). */
static inline void gen_store_gpr64(int reg, TCGv_i64 t)
{
#if defined(TARGET_PPC64)
    tcg_gen_mov_i64(cpu_gpr[reg], t);
#else
    TCGv_i64 tmp = tcg_temp_new_i64();
    tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
    tcg_gen_shri_i64(tmp, t, 32);
    tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
    tcg_temp_free_i64(tmp);
#endif
}
/* Two SPE instructions share one major opcode slot; the Rc bit of the
 * opcode selects between the name0 and name1 translators. */
#define GEN_SPE(name0, name1, opc2, opc3, inval, type)                        \
static void glue(gen_, name0##_##name1)(DisasContext *ctx)                    \
{                                                                             \
    if (Rc(ctx->opcode))                                                      \
        gen_##name1(ctx);                                                     \
    else                                                                      \
        gen_##name0(ctx);                                                     \
}
/* Handler for undefined SPE opcodes */
6685
static inline void gen_speundef(DisasContext *ctx)
6687
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6691
/* SPE two-operand logic: on 64-bit targets one target-long op covers
 * both halves; on 32-bit targets the op is applied to the low GPR and
 * the shadow high GPR separately. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_LOGIC2(name, tcg_op)                                        \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
}
#else
#define GEN_SPEOP_LOGIC2(name, tcg_op)                                        \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],              \
           cpu_gprh[rB(ctx->opcode)]);                                        \
}
#endif
GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);
/* SPE logic immediate */
6727
#if defined(TARGET_PPC64)
6728
#define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6729
static inline void gen_##name(DisasContext *ctx) \
6731
if (unlikely(!ctx->spe_enabled)) { \
6732
gen_exception(ctx, POWERPC_EXCP_APU); \
6735
TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6736
TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6737
TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6738
tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6739
tcg_opi(t0, t0, rB(ctx->opcode)); \
6740
tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6741
tcg_gen_trunc_i64_i32(t1, t2); \
6742
tcg_temp_free_i64(t2); \
6743
tcg_opi(t1, t1, rB(ctx->opcode)); \
6744
tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6745
tcg_temp_free_i32(t0); \
6746
tcg_temp_free_i32(t1); \
6749
#define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6750
static inline void gen_##name(DisasContext *ctx) \
6752
if (unlikely(!ctx->spe_enabled)) { \
6753
gen_exception(ctx, POWERPC_EXCP_APU); \
6756
tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6758
tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6762
GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
6763
GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
6764
GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
6765
GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
6767
/* SPE arithmetic */
/* One-operand per-half arithmetic: apply an i32 op to each 32-bit half
 * of rA.  On 64-bit targets the halves are split out of and re-packed
 * into the single 64-bit GPR. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH1(name, tcg_op)                                        \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    TCGv_i32 t0 = tcg_temp_local_new_i32();                                   \
    TCGv_i32 t1 = tcg_temp_local_new_i32();                                   \
    TCGv_i64 t2 = tcg_temp_local_new_i64();                                   \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                      \
    tcg_op(t0, t0);                                                           \
    tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32);                       \
    tcg_gen_trunc_i64_i32(t1, t2);                                            \
    tcg_temp_free_i64(t2);                                                    \
    tcg_op(t1, t1);                                                           \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);                 \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
#else
#define GEN_SPEOP_ARITH1(name, tcg_op)                                        \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);               \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);             \
}
#endif
/* Absolute value of one 32-bit half: ret := arg1 < 0 ? -arg1 : arg1. */
static inline void gen_op_evabs(TCGv_i32 ret, TCGv_i32 arg1)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();

    tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
    tcg_gen_neg_i32(ret, arg1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_i32(ret, arg1);
    gen_set_label(l2);
}
GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
/* Round to nearest 64K: add 0x8000 then clear the low 16 bits' carry
 * by keeping only the (rounded) high halfword value, zero-extended. */
static inline void gen_op_evrndw(TCGv_i32 ret, TCGv_i32 arg1)
{
    tcg_gen_addi_i32(ret, arg1, 0x8000);
    tcg_gen_ext16u_i32(ret, ret);
}
GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
/* Two-operand per-half arithmetic: apply an i32 op to the low halves
 * and to the high halves of rA/rB independently, storing into rD. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH2(name, tcg_op)                                        \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    TCGv_i32 t0 = tcg_temp_local_new_i32();                                   \
    TCGv_i32 t1 = tcg_temp_local_new_i32();                                   \
    TCGv_i32 t2 = tcg_temp_local_new_i32();                                   \
    TCGv_i64 t3 = tcg_temp_local_new_i64();                                   \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                      \
    tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]);                      \
    tcg_op(t0, t0, t2);                                                       \
    tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32);                       \
    tcg_gen_trunc_i64_i32(t1, t3);                                            \
    tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32);                       \
    tcg_gen_trunc_i64_i32(t2, t3);                                            \
    tcg_temp_free_i64(t3);                                                    \
    tcg_op(t1, t1, t2);                                                       \
    tcg_temp_free_i32(t2);                                                    \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);                 \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
#else
#define GEN_SPEOP_ARITH2(name, tcg_op)                                        \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],              \
           cpu_gprh[rB(ctx->opcode)]);                                        \
}
#endif
/* Logical shift right of one half; a 6-bit count >= 32 yields 0. */
static inline void gen_op_evsrwu(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    t0 = tcg_temp_local_new_i32();
    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_shr_i32(ret, arg1, t0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i32(ret, 0);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
/* Arithmetic shift right of one half; a 6-bit count >= 32 yields 0.
 * NOTE(review): a true arithmetic shift by >=32 would yield the sign
 * fill, not 0 — kept as-is to preserve existing behavior; verify
 * against the SPE spec. */
static inline void gen_op_evsrws(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    t0 = tcg_temp_local_new_i32();
    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_sar_i32(ret, arg1, t0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i32(ret, 0);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
/* Shift left of one half; a 6-bit count >= 32 yields 0. */
static inline void gen_op_evslw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    t0 = tcg_temp_local_new_i32();
    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_shl_i32(ret, arg1, t0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i32(ret, 0);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
/* Rotate left of one half; only the low 5 bits of the count are used. */
static inline void gen_op_evrlw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, arg2, 0x1F);
    tcg_gen_rotl_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
/* evmergehi: rD.hi := rA.hi, rD.lo := rB.hi.
 * BUG FIX: the 64-bit mask was 0xFFFFFFFF0000000ULL (one zero short),
 * which corrupted bits 28-31 of the result; it must select exactly the
 * upper 32 bits: 0xFFFFFFFF00000000ULL. */
static inline void gen_evmergehi(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
    tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
#endif
}
GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
6953
static inline void gen_op_evsubf(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6955
tcg_gen_sub_i32(ret, arg2, arg1);
6957
GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);
6959
/* SPE arithmetic immediate */
6960
#if defined(TARGET_PPC64)
6961
#define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
6962
static inline void gen_##name(DisasContext *ctx) \
6964
if (unlikely(!ctx->spe_enabled)) { \
6965
gen_exception(ctx, POWERPC_EXCP_APU); \
6968
TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6969
TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6970
TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6971
tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
6972
tcg_op(t0, t0, rA(ctx->opcode)); \
6973
tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
6974
tcg_gen_trunc_i64_i32(t1, t2); \
6975
tcg_temp_free_i64(t2); \
6976
tcg_op(t1, t1, rA(ctx->opcode)); \
6977
tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6978
tcg_temp_free_i32(t0); \
6979
tcg_temp_free_i32(t1); \
6982
#define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
6983
static inline void gen_##name(DisasContext *ctx) \
6985
if (unlikely(!ctx->spe_enabled)) { \
6986
gen_exception(ctx, POWERPC_EXCP_APU); \
6989
tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
6991
tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)], \
6995
GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
6996
GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);
6998
/* SPE comparison */
6999
#if defined(TARGET_PPC64)
7000
#define GEN_SPEOP_COMP(name, tcg_cond) \
7001
static inline void gen_##name(DisasContext *ctx) \
7003
if (unlikely(!ctx->spe_enabled)) { \
7004
gen_exception(ctx, POWERPC_EXCP_APU); \
7007
int l1 = gen_new_label(); \
7008
int l2 = gen_new_label(); \
7009
int l3 = gen_new_label(); \
7010
int l4 = gen_new_label(); \
7011
TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7012
TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7013
TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7014
tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7015
tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7016
tcg_gen_brcond_i32(tcg_cond, t0, t1, l1); \
7017
tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \
7019
gen_set_label(l1); \
7020
tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
7021
CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
7022
gen_set_label(l2); \
7023
tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
7024
tcg_gen_trunc_i64_i32(t0, t2); \
7025
tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
7026
tcg_gen_trunc_i64_i32(t1, t2); \
7027
tcg_temp_free_i64(t2); \
7028
tcg_gen_brcond_i32(tcg_cond, t0, t1, l3); \
7029
tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7030
~(CRF_CH | CRF_CH_AND_CL)); \
7032
gen_set_label(l3); \
7033
tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7034
CRF_CH | CRF_CH_OR_CL); \
7035
gen_set_label(l4); \
7036
tcg_temp_free_i32(t0); \
7037
tcg_temp_free_i32(t1); \
7040
#define GEN_SPEOP_COMP(name, tcg_cond) \
7041
static inline void gen_##name(DisasContext *ctx) \
7043
if (unlikely(!ctx->spe_enabled)) { \
7044
gen_exception(ctx, POWERPC_EXCP_APU); \
7047
int l1 = gen_new_label(); \
7048
int l2 = gen_new_label(); \
7049
int l3 = gen_new_label(); \
7050
int l4 = gen_new_label(); \
7052
tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)], \
7053
cpu_gpr[rB(ctx->opcode)], l1); \
7054
tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0); \
7056
gen_set_label(l1); \
7057
tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
7058
CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
7059
gen_set_label(l2); \
7060
tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)], \
7061
cpu_gprh[rB(ctx->opcode)], l3); \
7062
tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7063
~(CRF_CH | CRF_CH_AND_CL)); \
7065
gen_set_label(l3); \
7066
tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7067
CRF_CH | CRF_CH_OR_CL); \
7068
gen_set_label(l4); \
7071
GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
7072
GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
7073
GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
7074
GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
7075
GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
7078
static inline void gen_brinc(DisasContext *ctx)
{
    /* Note: brinc is usable even if SPE is disabled */
    gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
/* evmergelo: rD.hi := rA.lo, rD.lo := rB.lo. */
static inline void gen_evmergelo(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
#endif
}
/* evmergehilo: rD.hi := rA.hi, rD.lo := rB.lo.
 * BUG FIX: same as gen_evmergehi — the mask was 0xFFFFFFFF0000000ULL
 * (one zero short); corrected to 0xFFFFFFFF00000000ULL. */
static inline void gen_evmergehilo(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
#endif
}
/* evmergelohi: rD.hi := rA.lo, rD.lo := rB.hi.  On 32-bit targets the
 * rD == rA aliasing case needs a temporary so rA.lo is read before it
 * is overwritten. */
static inline void gen_evmergelohi(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
    tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    if (rD(ctx->opcode) == rA(ctx->opcode)) {
        TCGv_i32 tmp = tcg_temp_new_i32();
        tcg_gen_mov_i32(tmp, cpu_gpr[rA(ctx->opcode)]);
        tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
        tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], tmp);
        tcg_temp_free_i32(tmp);
    } else {
        tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
        tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    }
#endif
}
/* evsplati: splat the sign-extended 5-bit rA field into both halves. */
static inline void gen_evsplati(DisasContext *ctx)
{
    uint64_t imm = ((int32_t)(rA(ctx->opcode) << 27)) >> 27;

#if defined(TARGET_PPC64)
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
#else
    tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
    tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
#endif
}
/* evsplatfi: splat the 5-bit rA field shifted into the top of each
 * 32-bit half (fractional splat). */
static inline void gen_evsplatfi(DisasContext *ctx)
{
    uint64_t imm = rA(ctx->opcode) << 27;

#if defined(TARGET_PPC64)
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
#else
    tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
    tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
#endif
}
/* evsel: per-half select.  CR bit 3 of the field chooses rA.hi vs
 * rB.hi; CR bit 2 chooses rA.lo vs rB.lo.  On 64-bit targets the two
 * halves are assembled in t1/t2 and combined at the end. */
static inline void gen_evsel(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    int l3 = gen_new_label();
    int l4 = gen_new_label();
    TCGv_i32 t0 = tcg_temp_local_new_i32();
#if defined(TARGET_PPC64)
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
#endif
    tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
#endif
    tcg_gen_br(l2);
    gen_set_label(l1);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
#endif
    gen_set_label(l2);
    tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
#if defined(TARGET_PPC64)
    tcg_gen_ext32u_tl(t2, cpu_gpr[rA(ctx->opcode)]);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
#endif
    tcg_gen_br(l4);
    gen_set_label(l3);
#if defined(TARGET_PPC64)
    tcg_gen_ext32u_tl(t2, cpu_gpr[rB(ctx->opcode)]);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
#endif
    gen_set_label(l4);
    tcg_temp_free_i32(t0);
#if defined(TARGET_PPC64)
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
#endif
}
/* The four evsel opcode slots all decode to the same operation. */
static void gen_evsel0(DisasContext *ctx)
{
    gen_evsel(ctx);
}

static void gen_evsel1(DisasContext *ctx)
{
    gen_evsel(ctx);
}

static void gen_evsel2(DisasContext *ctx)
{
    gen_evsel(ctx);
}

static void gen_evsel3(DisasContext *ctx)
{
    gen_evsel(ctx);
}
/* Multiply */

/* evmwumi: rD := (uint64)rA.lo * (uint64)rB.lo (unsigned, modulo). */
static inline void gen_evmwumi(DisasContext *ctx)
{
    TCGv_i64 t0, t1;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();

    /* t0 := rA; t1 := rB */
#if defined(TARGET_PPC64)
    tcg_gen_ext32u_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_tl(t1, cpu_gpr[rB(ctx->opcode)]);
#else
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
#endif

    tcg_gen_mul_i64(t0, t0, t1);  /* t0 := rA * rB */

    gen_store_gpr64(rD(ctx->opcode), t0);  /* rD := t0 */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
/* evmwumia: as evmwumi, and also write the product to the accumulator. */
static inline void gen_evmwumia(DisasContext *ctx)
{
    TCGv_i64 tmp;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }

    gen_evmwumi(ctx);            /* rD := rA * rB */

    tmp = tcg_temp_new_i64();

    /* acc := rD */
    gen_load_gpr64(tmp, rD(ctx->opcode));
    tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));
    tcg_temp_free_i64(tmp);
}
/* evmwumiaa: as evmwumi, then acc += product and rD := acc. */
static inline void gen_evmwumiaa(DisasContext *ctx)
{
    TCGv_i64 acc;
    TCGv_i64 tmp;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }

    gen_evmwumi(ctx);           /* rD := rA * rB */

    acc = tcg_temp_new_i64();
    tmp = tcg_temp_new_i64();

    /* tmp := rD */
    gen_load_gpr64(tmp, rD(ctx->opcode));

    /* Load acc */
    tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUState, spe_acc));

    /* acc := tmp + acc */
    tcg_gen_add_i64(acc, acc, tmp);

    /* Store acc */
    tcg_gen_st_i64(acc, cpu_env, offsetof(CPUState, spe_acc));

    /* rD := acc */
    gen_store_gpr64(rD(ctx->opcode), acc);

    tcg_temp_free_i64(acc);
    tcg_temp_free_i64(tmp);
}
/* evmwsmi: rD := (int64)rA.lo * (int64)rB.lo (signed, modulo). */
static inline void gen_evmwsmi(DisasContext *ctx)
{
    TCGv_i64 t0, t1;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();

    /* t0 := rA; t1 := rB */
#if defined(TARGET_PPC64)
    tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
#else
    tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
#endif

    tcg_gen_mul_i64(t0, t0, t1);  /* t0 := rA * rB */

    gen_store_gpr64(rD(ctx->opcode), t0);  /* rD := t0 */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
/* evmwsmia: as evmwsmi, and also write the product to the accumulator.
 * (The SPE-enabled check is done inside gen_evmwsmi.) */
static inline void gen_evmwsmia(DisasContext *ctx)
{
    TCGv_i64 tmp;

    gen_evmwsmi(ctx);            /* rD := rA * rB */

    tmp = tcg_temp_new_i64();

    /* acc := rD */
    gen_load_gpr64(tmp, rD(ctx->opcode));
    tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));

    tcg_temp_free_i64(tmp);
}
/* evmwsmiaa: as evmwsmi, then acc += product and rD := acc.
 * BUG FIX: the original allocated acc/tmp twice (once at declaration
 * and again after gen_evmwsmi), leaking the first pair of TCG temps;
 * the redundant re-allocation is removed. */
static inline void gen_evmwsmiaa(DisasContext *ctx)
{
    TCGv_i64 acc = tcg_temp_new_i64();
    TCGv_i64 tmp = tcg_temp_new_i64();

    gen_evmwsmi(ctx);           /* rD := rA * rB */

    /* tmp := rD */
    gen_load_gpr64(tmp, rD(ctx->opcode));

    /* Load acc */
    tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUState, spe_acc));

    /* acc := tmp + acc */
    tcg_gen_add_i64(acc, acc, tmp);

    /* Store acc */
    tcg_gen_st_i64(acc, cpu_env, offsetof(CPUState, spe_acc));

    /* rD := acc */
    gen_store_gpr64(rD(ctx->opcode), acc);

    tcg_temp_free_i64(acc);
    tcg_temp_free_i64(tmp);
}
/* SPE opcode table registrations: each line binds a pair of
 * translators (Rc=0 / Rc=1) to an (opc2, opc3) slot. */
GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE); ////
GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE); ////
GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE); ////
GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE); //
GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, PPC_SPE);
GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE); //
GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); ////
GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); ////
GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); ////
/* SPE load and stores */
7429
static inline void gen_addr_spe_imm_index(DisasContext *ctx, TCGv EA, int sh)
7431
target_ulong uimm = rB(ctx->opcode);
7433
if (rA(ctx->opcode) == 0) {
7434
tcg_gen_movi_tl(EA, uimm << sh);
7436
tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh);
7437
#if defined(TARGET_PPC64)
7438
if (!ctx->sf_mode) {
7439
tcg_gen_ext32u_tl(EA, EA);
7445
/* evldd: load a 64-bit doubleword into rD (split across gpr/gprh on
 * 32-bit targets). */
static inline void gen_op_evldd(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], addr);
#else
    TCGv_i64 t0 = tcg_temp_new_i64();
    gen_qemu_ld64(ctx, t0, addr);
    tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_shri_i64(t0, t0, 32);
    tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
#endif
}
/* evldw: load two 32-bit words; the first goes to the high half of rD,
 * the second to the low half. */
static inline void gen_op_evldw(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld32u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    gen_addr_add(ctx, addr, addr, 4);
    gen_qemu_ld32u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#else
    gen_qemu_ld32u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 4);
    gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
#endif
}
/* evldh: load four halfwords, packing two into each half of rD.
 * BUG FIX (32-bit branch): the third halfword must be shifted into
 * cpu_gpr (the low register), not cpu_gprh — the corrupted source
 * wrote cpu_gprh, which would clobber the already-assembled high half
 * and leave the final OR operating on an uninitialized upper part. */
static inline void gen_op_evldh(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evlhhesplat: load one halfword and splat it into the even (upper)
 * halfword of both 32-bit halves of rD. */
static inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evlhhousplat: load one halfword zero-extended and splat it into both
 * 32-bit halves of rD. */
static inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evlhhossplat: load one halfword sign-extended and splat it into both
 * 32-bit halves of rD. */
static inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16s(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_ext32u_tl(t0, t0);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evlwhe: load two halfwords into the high (even) halves of the two
 * 32-bit elements of rD; the low halves are zeroed. */
static inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
#endif
    tcg_temp_free(t0);
}
/* evlwhou: load two halfwords, zero-extended, into the low (odd) halves of
 * the two 32-bit elements of rD; high halves are zeroed.
 *
 * FIX: the PPC64 branch previously produced h0 | (h1 << 32), i.e. put the
 * FIRST halfword in the low word.  The 32-bit branch puts the first
 * halfword in cpu_gprh (the upper element), so the 64-bit layout must be
 * (h0 << 32) | h1.  NOTE(review): verified against the 32-bit branch of
 * this same function; confirm against the SPE PEM element ordering. */
static inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#else
    gen_qemu_ld16u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
#endif
}
/* evlwhos: load two halfwords, sign-extended to 32 bits, into the two
 * 32-bit elements of rD.
 *
 * FIX: as with evlwhou, the PPC64 branch placed the first halfword in the
 * LOW word (ext32u(h0) | h1<<32) while the 32-bit branch places it in
 * cpu_gprh (the upper element).  Layout corrected to
 * (sext32(h0) << 32) | zext32(sext32(h1)).  NOTE(review): verified against
 * the 32-bit branch; confirm against the SPE PEM element ordering. */
static inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16s(ctx, t0, addr);
    /* The left shift drops the sign-extension bits above bit 31. */
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16s(ctx, t0, addr);
    /* Keep only the 32-bit sign-extended value for the low element. */
    tcg_gen_ext32u_tl(t0, t0);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#else
    gen_qemu_ld16s(ctx, cpu_gprh[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16s(ctx, cpu_gpr[rD(ctx->opcode)], addr);
#endif
}
/* evlwwsplat: load one word from EA and splat it into both 32-bit
 * elements of rD. */
static inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld32u(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evlwhsplat: load two halfwords; splat the first into both halves of the
 * upper element and the second into both halves of the lower element, i.e.
 * rD = h0:h0:h1:h1.
 *
 * FIX (32-bit branch): the final OR previously read cpu_gprh[rD] as its
 * source (tcg_gen_or_tl(cpu_gpr, cpu_gprh, t0)), clobbering the low word
 * with the upper element's value instead of combining (h1 << 16) | h1.
 * It must accumulate into cpu_gpr[rD]. */
static inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    tcg_gen_shli_tl(t0, t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    /* OR in h1, then h1 << 16, building h1:h1 in the low word. */
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
/* evstdd: store the full 64-bit SPE register to EA. */
static inline void gen_op_evstdd(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], addr);
#else
    /* Reassemble the 64-bit value from the low (gpr) and high (gprh)
     * halves before storing. */
    TCGv_i64 t0 = tcg_temp_new_i64();
    tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)], cpu_gprh[rS(ctx->opcode)]);
    gen_qemu_st64(ctx, t0, addr);
    tcg_temp_free_i64(t0);
#endif
}
/* evstdw: store the two 32-bit elements of rS as two words, upper
 * element first. */
static inline void gen_op_evstdw(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st32(ctx, t0, addr);
    tcg_temp_free(t0);
#else
    gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
    gen_addr_add(ctx, addr, addr, 4);
    gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
/* evstdh: store all four 16-bit halves of the SPE register, most
 * significant first. */
static inline void gen_op_evstdh(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    /* Halfword 0 (bits 63..48 / gprh high). */
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
#else
    tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
#endif
    gen_qemu_st16(ctx, t0, addr);
    gen_addr_add(ctx, addr, addr, 2);
    /* Halfword 1 (bits 47..32 / gprh low). */
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st16(ctx, t0, addr);
#else
    gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
    gen_addr_add(ctx, addr, addr, 2);
    /* Halfword 2 (bits 31..16). */
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
    gen_qemu_st16(ctx, t0, addr);
    gen_addr_add(ctx, addr, addr, 2);
    /* Halfword 3 (bits 15..0). */
    gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
    tcg_temp_free(t0);
}
/* evstwhe: store the high (even) halfword of each 32-bit element. */
static inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
#else
    tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
#endif
    gen_qemu_st16(ctx, t0, addr);
    gen_addr_add(ctx, addr, addr, 2);
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
    gen_qemu_st16(ctx, t0, addr);
    tcg_temp_free(t0);
}
/* evstwho: store the low (odd) halfword of each 32-bit element. */
static inline void gen_op_evstwho(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st16(ctx, t0, addr);
    tcg_temp_free(t0);
#else
    gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
/* evstwwe: store the upper (even) 32-bit element of rS. */
static inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st32(ctx, t0, addr);
    tcg_temp_free(t0);
#else
    gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
}
/* evstwwo: store the lower (odd) 32-bit element of rS. */
static inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr)
{
    gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
/* Emit the translation wrapper for one SPE load/store: check SPE enable,
 * compute the effective address (immediate-indexed when Rc is set,
 * register-indexed otherwise), then call the per-insn gen_op_ helper. */
#define GEN_SPEOP_LDST(name, opc2, sh)                                        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0;                                                                  \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    t0 = tcg_temp_new();                                                      \
    if (Rc(ctx->opcode)) {                                                    \
        gen_addr_spe_imm_index(ctx, t0, sh);                                  \
    } else {                                                                  \
        gen_addr_reg_index(ctx, t0);                                          \
    }                                                                         \
    gen_op_##name(ctx, t0);                                                   \
    tcg_temp_free(t0);                                                        \
}
/* SPE loads. */
GEN_SPEOP_LDST(evldd, 0x00, 3);
GEN_SPEOP_LDST(evldw, 0x01, 3);
GEN_SPEOP_LDST(evldh, 0x02, 3);
GEN_SPEOP_LDST(evlhhesplat, 0x04, 1);
GEN_SPEOP_LDST(evlhhousplat, 0x06, 1);
GEN_SPEOP_LDST(evlhhossplat, 0x07, 1);
GEN_SPEOP_LDST(evlwhe, 0x08, 2);
GEN_SPEOP_LDST(evlwhou, 0x0A, 2);
GEN_SPEOP_LDST(evlwhos, 0x0B, 2);
GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2);
GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2);

/* SPE stores. */
GEN_SPEOP_LDST(evstdd, 0x10, 3);
GEN_SPEOP_LDST(evstdw, 0x11, 3);
GEN_SPEOP_LDST(evstdh, 0x12, 3);
GEN_SPEOP_LDST(evstwhe, 0x18, 2);
GEN_SPEOP_LDST(evstwho, 0x1A, 2);
GEN_SPEOP_LDST(evstwwe, 0x1C, 2);
GEN_SPEOP_LDST(evstwwo, 0x1E, 2);
/* Multiply and add - TODO */
7785
GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0x00000000, PPC_SPE);
7786
GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0x00000000, PPC_SPE);
7787
GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, PPC_SPE);
7788
GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0x00000000, PPC_SPE);
7789
GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, PPC_SPE);
7790
GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0x00000000, PPC_SPE);
7791
GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0x00000000, PPC_SPE);
7792
GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0x00000000, PPC_SPE);
7793
GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, PPC_SPE);
7794
GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0x00000000, PPC_SPE);
7795
GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, PPC_SPE);
7796
GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0x00000000, PPC_SPE);
7798
GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0x00000000, PPC_SPE);
7799
GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, PPC_SPE);
7800
GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, PPC_SPE);
7801
GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0x00000000, PPC_SPE);
7802
GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0x00000000, PPC_SPE);
7803
GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0x00000000, PPC_SPE);
7804
GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0x00000000, PPC_SPE);
7805
GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, PPC_SPE);
7806
GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, PPC_SPE);
7807
GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0x00000000, PPC_SPE);
7808
GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0x00000000, PPC_SPE);
7809
GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0x00000000, PPC_SPE);
7811
GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, PPC_SPE);
7812
GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, PPC_SPE);
7813
GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, PPC_SPE);
7814
GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, PPC_SPE);
7815
GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, PPC_SPE);
7817
GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, PPC_SPE);
7818
GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0x00000000, PPC_SPE);
7819
GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, PPC_SPE);
7820
GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0x00000000, PPC_SPE);
7821
GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, PPC_SPE);
7822
GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0x00000000, PPC_SPE);
7823
GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, PPC_SPE);
7824
GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0x00000000, PPC_SPE);
7825
GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, PPC_SPE);
7826
GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0x00000000, PPC_SPE);
7827
GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, PPC_SPE);
7828
GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0x00000000, PPC_SPE);
7830
GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, PPC_SPE);
7831
GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, PPC_SPE);
7832
GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0x00000000, PPC_SPE);
7833
GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0x00000000, PPC_SPE);
7835
GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, PPC_SPE);
7836
GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0x00000000, PPC_SPE);
7837
GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, PPC_SPE);
7838
GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0x00000000, PPC_SPE);
7839
GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, PPC_SPE);
7840
GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0x00000000, PPC_SPE);
7841
GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, PPC_SPE);
7842
GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0x00000000, PPC_SPE);
7843
GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, PPC_SPE);
7844
GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0x00000000, PPC_SPE);
7845
GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, PPC_SPE);
7846
GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0x00000000, PPC_SPE);
7848
GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, PPC_SPE);
7849
GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, PPC_SPE);
7850
GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0x00000000, PPC_SPE);
7851
GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, PPC_SPE);
7852
GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
7855
/*** SPE floating-point extension ***/
#if defined(TARGET_PPC64)
/* On 64-bit targets the full SPE value lives in cpu_gpr[]; 32-bit results
 * are merged into the low word while preserving the high word. */
#define GEN_SPEFPUOP_CONV_32_32(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i32 t0;                                                              \
    TCGv t1;                                                                  \
    t0 = tcg_temp_new_i32();                                                  \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]);                       \
    gen_helper_##name(t0, t0);                                                \
    t1 = tcg_temp_new();                                                      \
    tcg_gen_extu_i32_tl(t1, t0);                                              \
    tcg_temp_free_i32(t0);                                                    \
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)],       \
                    0xFFFFFFFF00000000ULL);                                   \
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1);    \
    tcg_temp_free(t1);                                                        \
}
#define GEN_SPEFPUOP_CONV_32_64(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i32 t0;                                                              \
    TCGv t1;                                                                  \
    t0 = tcg_temp_new_i32();                                                  \
    gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]);                          \
    t1 = tcg_temp_new();                                                      \
    tcg_gen_extu_i32_tl(t1, t0);                                              \
    tcg_temp_free_i32(t0);                                                    \
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)],       \
                    0xFFFFFFFF00000000ULL);                                   \
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1);    \
    tcg_temp_free(t1);                                                        \
}
#define GEN_SPEFPUOP_CONV_64_32(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i32 t0 = tcg_temp_new_i32();                                         \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]);                       \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0);                          \
    tcg_temp_free_i32(t0);                                                    \
}
#define GEN_SPEFPUOP_CONV_64_64(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#define GEN_SPEFPUOP_ARITH2_32_32(name)                                       \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i32 t0, t1;                                                          \
    TCGv t2;                                                                  \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i32();                                                  \
    t1 = tcg_temp_new_i32();                                                  \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);                       \
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);                       \
    gen_helper_##name(t0, t0, t1);                                            \
    tcg_temp_free_i32(t1);                                                    \
    t2 = tcg_temp_new();                                                      \
    tcg_gen_extu_i32_tl(t2, t0);                                              \
    tcg_temp_free_i32(t0);                                                    \
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)],       \
                    0xFFFFFFFF00000000ULL);                                   \
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t2);    \
    tcg_temp_free(t2);                                                        \
}
#define GEN_SPEFPUOP_ARITH2_64_64(name)                                       \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],     \
                      cpu_gpr[rB(ctx->opcode)]);                              \
}
#define GEN_SPEFPUOP_COMP_32(name)                                            \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i32 t0, t1;                                                          \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i32();                                                  \
    t1 = tcg_temp_new_i32();                                                  \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);                       \
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);                       \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1);                    \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
#define GEN_SPEFPUOP_COMP_64(name)                                            \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)],                             \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#else
/* 32-bit targets: 64-bit SPE values are split across gpr/gprh and are
 * marshalled through an i64 temp with gen_load_gpr64/gen_store_gpr64. */
#define GEN_SPEFPUOP_CONV_32_32(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#define GEN_SPEFPUOP_CONV_32_64(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i64 t0 = tcg_temp_new_i64();                                         \
    gen_load_gpr64(t0, rB(ctx->opcode));                                      \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0);                          \
    tcg_temp_free_i64(t0);                                                    \
}
#define GEN_SPEFPUOP_CONV_64_32(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i64 t0 = tcg_temp_new_i64();                                         \
    gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]);                          \
    gen_store_gpr64(rD(ctx->opcode), t0);                                     \
    tcg_temp_free_i64(t0);                                                    \
}
#define GEN_SPEFPUOP_CONV_64_64(name)                                         \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i64 t0 = tcg_temp_new_i64();                                         \
    gen_load_gpr64(t0, rB(ctx->opcode));                                      \
    gen_helper_##name(t0, t0);                                                \
    gen_store_gpr64(rD(ctx->opcode), t0);                                     \
    tcg_temp_free_i64(t0);                                                    \
}
#define GEN_SPEFPUOP_ARITH2_32_32(name)                                       \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)],                               \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#define GEN_SPEFPUOP_ARITH2_64_64(name)                                       \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i64 t0, t1;                                                          \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i64();                                                  \
    t1 = tcg_temp_new_i64();                                                  \
    gen_load_gpr64(t0, rA(ctx->opcode));                                      \
    gen_load_gpr64(t1, rB(ctx->opcode));                                      \
    gen_helper_##name(t0, t0, t1);                                            \
    gen_store_gpr64(rD(ctx->opcode), t0);                                     \
    tcg_temp_free_i64(t0);                                                    \
    tcg_temp_free_i64(t1);                                                    \
}
#define GEN_SPEFPUOP_COMP_32(name)                                            \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)],                             \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#define GEN_SPEFPUOP_COMP_64(name)                                            \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    TCGv_i64 t0, t1;                                                          \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i64();                                                  \
    t1 = tcg_temp_new_i64();                                                  \
    gen_load_gpr64(t0, rA(ctx->opcode));                                      \
    gen_load_gpr64(t1, rB(ctx->opcode));                                      \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1);                    \
    tcg_temp_free_i64(t0);                                                    \
    tcg_temp_free_i64(t1);                                                    \
}
#endif
/* Single precision floating-point vectors operations */
/* Arithmetic */
GEN_SPEFPUOP_ARITH2_64_64(evfsadd);
GEN_SPEFPUOP_ARITH2_64_64(evfssub);
GEN_SPEFPUOP_ARITH2_64_64(evfsmul);
GEN_SPEFPUOP_ARITH2_64_64(evfsdiv);
/* evfsabs: clear the sign bit of both single-precision elements. */
static inline void gen_evfsabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000080000000LL);
#else
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x80000000);
    tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
#endif
}
/* evfsnabs: force the sign bit of both single-precision elements. */
static inline void gen_evfsnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
#else
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
    tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
#endif
}
/* evfsneg: flip the sign bit of both single-precision elements. */
static inline void gen_evfsneg(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
#else
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
    tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
#endif
}
GEN_SPEFPUOP_CONV_64_64(evfscfui);
8094
GEN_SPEFPUOP_CONV_64_64(evfscfsi);
8095
GEN_SPEFPUOP_CONV_64_64(evfscfuf);
8096
GEN_SPEFPUOP_CONV_64_64(evfscfsf);
8097
GEN_SPEFPUOP_CONV_64_64(evfsctui);
8098
GEN_SPEFPUOP_CONV_64_64(evfsctsi);
8099
GEN_SPEFPUOP_CONV_64_64(evfsctuf);
8100
GEN_SPEFPUOP_CONV_64_64(evfsctsf);
8101
GEN_SPEFPUOP_CONV_64_64(evfsctuiz);
8102
GEN_SPEFPUOP_CONV_64_64(evfsctsiz);
8105
GEN_SPEFPUOP_COMP_64(evfscmpgt);
8106
GEN_SPEFPUOP_COMP_64(evfscmplt);
8107
GEN_SPEFPUOP_COMP_64(evfscmpeq);
8108
GEN_SPEFPUOP_COMP_64(evfststgt);
8109
GEN_SPEFPUOP_COMP_64(evfststlt);
8110
GEN_SPEFPUOP_COMP_64(evfststeq);
8112
/* Opcodes definitions */
8113
GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPE_SINGLE); //
8114
GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPE_SINGLE); //
8115
GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPE_SINGLE); //
8116
GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPE_SINGLE); //
8117
GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8118
GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8119
GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8120
GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8121
GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8122
GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8123
GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8124
GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8125
GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8126
GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8128
/* Single precision floating-point operations */
/* Arithmetic */
GEN_SPEFPUOP_ARITH2_32_32(efsadd);
GEN_SPEFPUOP_ARITH2_32_32(efssub);
GEN_SPEFPUOP_ARITH2_32_32(efsmul);
GEN_SPEFPUOP_ARITH2_32_32(efsdiv);
/* efsabs: clear the sign bit of the single-precision value in the low
 * word of rA. */
static inline void gen_efsabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], (target_long)~0x80000000LL);
}
/* efsnabs: force the sign bit of the single-precision value. */
static inline void gen_efsnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
}
/* efsneg: flip the sign bit of the single-precision value. */
static inline void gen_efsneg(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
}
GEN_SPEFPUOP_CONV_32_32(efscfui);
8161
GEN_SPEFPUOP_CONV_32_32(efscfsi);
8162
GEN_SPEFPUOP_CONV_32_32(efscfuf);
8163
GEN_SPEFPUOP_CONV_32_32(efscfsf);
8164
GEN_SPEFPUOP_CONV_32_32(efsctui);
8165
GEN_SPEFPUOP_CONV_32_32(efsctsi);
8166
GEN_SPEFPUOP_CONV_32_32(efsctuf);
8167
GEN_SPEFPUOP_CONV_32_32(efsctsf);
8168
GEN_SPEFPUOP_CONV_32_32(efsctuiz);
8169
GEN_SPEFPUOP_CONV_32_32(efsctsiz);
8170
GEN_SPEFPUOP_CONV_32_64(efscfd);
8173
GEN_SPEFPUOP_COMP_32(efscmpgt);
8174
GEN_SPEFPUOP_COMP_32(efscmplt);
8175
GEN_SPEFPUOP_COMP_32(efscmpeq);
8176
GEN_SPEFPUOP_COMP_32(efststgt);
8177
GEN_SPEFPUOP_COMP_32(efststlt);
8178
GEN_SPEFPUOP_COMP_32(efststeq);
8180
/* Opcodes definitions */
8181
GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPE_SINGLE); //
8182
GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPE_SINGLE); //
8183
GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPE_SINGLE); //
8184
GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPE_SINGLE); //
8185
GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8186
GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8187
GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8188
GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8189
GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8190
GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8191
GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8192
GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8193
GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8194
GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8196
/* Double precision floating-point operations */
/* Arithmetic */
GEN_SPEFPUOP_ARITH2_64_64(efdadd);
GEN_SPEFPUOP_ARITH2_64_64(efdsub);
GEN_SPEFPUOP_ARITH2_64_64(efdmul);
GEN_SPEFPUOP_ARITH2_64_64(efddiv);
/* efdabs: clear the sign bit of the double-precision value (bit 63,
 * which lives in gprh on 32-bit builds). */
static inline void gen_efdabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000000000000LL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
#endif
}
/* efdnabs: force the sign bit of the double-precision value. */
static inline void gen_efdnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
#endif
}
/* efdneg: flip the sign bit of the double-precision value. */
static inline void gen_efdneg(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
#endif
}
GEN_SPEFPUOP_CONV_64_32(efdcfui);
8244
GEN_SPEFPUOP_CONV_64_32(efdcfsi);
8245
GEN_SPEFPUOP_CONV_64_32(efdcfuf);
8246
GEN_SPEFPUOP_CONV_64_32(efdcfsf);
8247
GEN_SPEFPUOP_CONV_32_64(efdctui);
8248
GEN_SPEFPUOP_CONV_32_64(efdctsi);
8249
GEN_SPEFPUOP_CONV_32_64(efdctuf);
8250
GEN_SPEFPUOP_CONV_32_64(efdctsf);
8251
GEN_SPEFPUOP_CONV_32_64(efdctuiz);
8252
GEN_SPEFPUOP_CONV_32_64(efdctsiz);
8253
GEN_SPEFPUOP_CONV_64_32(efdcfs);
8254
GEN_SPEFPUOP_CONV_64_64(efdcfuid);
8255
GEN_SPEFPUOP_CONV_64_64(efdcfsid);
8256
GEN_SPEFPUOP_CONV_64_64(efdctuidz);
8257
GEN_SPEFPUOP_CONV_64_64(efdctsidz);
8260
GEN_SPEFPUOP_COMP_64(efdcmpgt);
8261
GEN_SPEFPUOP_COMP_64(efdcmplt);
8262
GEN_SPEFPUOP_COMP_64(efdcmpeq);
8263
GEN_SPEFPUOP_COMP_64(efdtstgt);
8264
GEN_SPEFPUOP_COMP_64(efdtstlt);
8265
GEN_SPEFPUOP_COMP_64(efdtsteq);
8267
/* Opcodes definitions */
8268
GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPE_DOUBLE); //
8269
GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8270
GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPE_DOUBLE); //
8271
GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPE_DOUBLE); //
8272
GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPE_DOUBLE); //
8273
GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8274
GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
8275
GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
8276
GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8277
GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8278
GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8279
GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8280
GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8281
GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8282
GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
8283
GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
8285
static opcode_t opcodes[] = {
8286
GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
8287
GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
8288
GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8289
GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER),
8290
GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8291
GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
8292
GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8293
GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8294
GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8295
GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8296
GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
8297
GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
8298
GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
8299
GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
8300
GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8301
#if defined(TARGET_PPC64)
8302
GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
8304
GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
8305
GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
8306
GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8307
GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8308
GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8309
GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
8310
GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
8311
GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
8312
GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8313
GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8314
GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8315
GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8316
GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB),
8317
GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
8318
#if defined(TARGET_PPC64)
8319
GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
8320
GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
8322
GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8323
GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8324
GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8325
GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
8326
GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
8327
GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
8328
GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
8329
#if defined(TARGET_PPC64)
8330
GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
8331
GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
8332
GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
8333
GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
8334
GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
8336
GEN_HANDLER(frsqrtes, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES),
8337
GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8338
GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8339
GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT),
8340
GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT),
8341
GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT),
8342
GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT),
8343
GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT),
8344
GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT),
8345
GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT),
8346
GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x00010000, PPC_FLOAT),
8347
GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT),
8348
#if defined(TARGET_PPC64)
8349
GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
8350
GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
8351
GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
8353
GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8354
GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8355
GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
8356
GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
8357
GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
8358
GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
8359
GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO),
8360
GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
8361
GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
8362
GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
8363
#if defined(TARGET_PPC64)
8364
GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
8365
GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
#endif
GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
8368
GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
8369
GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8370
GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8371
GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
8372
GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
8373
GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
8374
GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
8375
#if defined(TARGET_PPC64)
8376
GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
8377
GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
#endif
GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW),
8380
GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
8381
GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8382
#if defined(TARGET_PPC64)
8383
GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
8384
GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
#endif
GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
8387
GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
8388
GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
8389
GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
8390
GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
8391
GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
8392
#if defined(TARGET_PPC64)
8393
GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
#endif
GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC),
8396
GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC),
8397
GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
8398
GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
8399
GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
8400
GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE),
8401
GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE),
8402
GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03E00001, PPC_CACHE_DCBZ),
8403
GEN_HANDLER2(dcbz_970, "dcbz", 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZT),
8404
GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
8405
GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC),
8406
GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
8407
GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
8408
GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
8409
GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
8410
GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
8411
GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
8412
GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
8413
#if defined(TARGET_PPC64)
8414
GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
8415
GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
             PPC_SEGMENT_64B),
GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
8418
GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
             PPC_SEGMENT_64B),
GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
8421
GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
8422
GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
#endif
GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
8425
GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x03FF0001, PPC_MEM_TLBIE),
8426
GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE),
8427
GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
8428
#if defined(TARGET_PPC64)
8429
GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI),
8430
GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
#endif
GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
8433
GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
8434
GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
8435
GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
8436
GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
8437
GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
8438
GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
8439
GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
8440
GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
8441
GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
8442
GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
8443
GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8444
GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
8445
GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
8446
GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
8447
GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
8448
GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
8449
GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
8450
GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
8451
GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8452
GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
8453
GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
8454
GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
8455
GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
8456
GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
8457
GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
8458
GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
8459
GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
8460
GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
8461
GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
8462
GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
8463
GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
8464
GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
8465
GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
8466
GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
8467
GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
8468
GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
8469
GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
8470
GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
8471
GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
8472
GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
8473
GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
8474
GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
8475
GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
8476
GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
8477
GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
8478
GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
8479
GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
8480
GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
8481
GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8482
GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8483
GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
8484
GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
8485
GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8486
GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8487
GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
8488
GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
8489
GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
8490
GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
8491
GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
8492
GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
8493
GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
8494
GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
8495
GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
8496
GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
8497
GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
8498
GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
8499
GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
8500
GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
8501
GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
8502
GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
8503
GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
8504
GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
8505
GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
8506
GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
8507
GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
8508
GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
8509
GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
8510
GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
8511
GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
8512
GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
               PPC_NONE, PPC2_BOOKE206),
8520
GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
8521
GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
8522
GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
8523
GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
              PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER_E(msync, 0x1F, 0x16, 0x12, 0x03FFF801,
              PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
               PPC_BOOKE, PPC2_BOOKE206),
8529
GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
8530
GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
8531
GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
8532
GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
8533
GEN_HANDLER(vsldoi, 0x04, 0x16, 0xFF, 0x00000400, PPC_ALTIVEC),
8534
GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
8535
GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE),
8536
GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE),
8537
GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE),
8538
GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE),
8540
#undef GEN_INT_ARITH_ADD
#undef GEN_INT_ARITH_ADD_CONST
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
                                add_ca, compute_ca, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
8547
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
8548
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
8549
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
8550
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
8551
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
8552
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
8553
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
8554
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
8555
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
8556
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
8558
#undef GEN_INT_ARITH_DIVW
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
8561
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
8562
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
8563
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
8564
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
8566
#if defined(TARGET_PPC64)
8567
#undef GEN_INT_ARITH_DIVD
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8570
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
8571
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
8572
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
8573
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
8575
#undef GEN_INT_ARITH_MUL_HELPER
#define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8578
GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
8579
GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
8580
GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
8583
#undef GEN_INT_ARITH_SUBF
#undef GEN_INT_ARITH_SUBF_CONST
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
                                 add_ca, compute_ca, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
8590
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
8591
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
8592
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
8593
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
8594
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
8595
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
8596
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
8597
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
8598
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
8599
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
8603
#define GEN_LOGICAL2(name, tcg_op, opc, type) \
GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
#define GEN_LOGICAL1(name, tcg_op, opc, type) \
GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
8607
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
8608
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
8609
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
8610
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
8611
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
8612
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
8613
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
8614
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
8615
#if defined(TARGET_PPC64)
8616
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
#endif
#if defined(TARGET_PPC64)
8622
#define GEN_PPC64_R2(name, opc1, opc2) \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
             PPC_64B)
#define GEN_PPC64_R4(name, opc1, opc2) \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
             PPC_64B),                                                        \
GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
             PPC_64B),                                                        \
GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
             PPC_64B)
GEN_PPC64_R4(rldicl, 0x1E, 0x00),
8635
GEN_PPC64_R4(rldicr, 0x1E, 0x02),
8636
GEN_PPC64_R4(rldic, 0x1E, 0x04),
8637
GEN_PPC64_R2(rldcl, 0x1E, 0x08),
8638
GEN_PPC64_R2(rldcr, 0x1E, 0x09),
8639
GEN_PPC64_R4(rldimi, 0x1E, 0x06),
8642
#undef _GEN_FLOAT_ACB
#undef GEN_FLOAT_ACB
#undef _GEN_FLOAT_AB
#undef GEN_FLOAT_AB
#undef _GEN_FLOAT_AC
#undef GEN_FLOAT_AC
#undef GEN_FLOAT_B
#undef GEN_FLOAT_BS
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type)
#define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type), \
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type)
#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
#define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type)
#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type)
8670
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT),
8671
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT),
8672
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT),
8673
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT),
8674
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES),
8675
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE),
8676
_GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL),
8677
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT),
8678
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT),
8679
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT),
8680
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT),
8681
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT),
8682
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT),
8683
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT),
8684
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT),
8685
#if defined(TARGET_PPC64)
8686
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B),
8687
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B),
8688
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B),
#endif
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT),
8691
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT),
8692
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT),
8693
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT),
8694
GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT),
8695
GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT),
8696
GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT),
8703
#define GEN_LD(name, ldop, opc, type) \
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_LDU(name, ldop, opc, type) \
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_LDUX(name, ldop, opc2, opc3, type) \
GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
#define GEN_LDX(name, ldop, opc2, opc3, type) \
GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
#define GEN_LDS(name, ldop, op, type) \
GEN_LD(name, ldop, op | 0x20, type) \
GEN_LDU(name, ldop, op | 0x21, type) \
GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
GEN_LDX(name, ldop, 0x17, op | 0x00, type)
8717
GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
8718
GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
8719
GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
8720
GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
8721
#if defined(TARGET_PPC64)
8722
GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
8723
GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
8724
GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B)
8725
GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B)
#endif
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
8728
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
8735
#define GEN_ST(name, stop, opc, type) \
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_STU(name, stop, opc, type) \
GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_STUX(name, stop, opc2, opc3, type) \
GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
#define GEN_STX(name, stop, opc2, opc3, type) \
GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
#define GEN_STS(name, stop, op, type) \
GEN_ST(name, stop, op | 0x20, type) \
GEN_STU(name, stop, op | 0x21, type) \
GEN_STUX(name, stop, 0x17, op | 0x01, type) \
GEN_STX(name, stop, 0x17, op | 0x00, type)
8749
GEN_STS(stb, st8, 0x06, PPC_INTEGER)
8750
GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
8751
GEN_STS(stw, st32, 0x04, PPC_INTEGER)
8752
#if defined(TARGET_PPC64)
8753
GEN_STUX(std, st64, 0x15, 0x05, PPC_64B)
8754
GEN_STX(std, st64, 0x15, 0x04, PPC_64B)
#endif
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
8757
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
8764
#define GEN_LDF(name, ldop, opc, type) \
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_LDUF(name, ldop, opc, type) \
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_LDUXF(name, ldop, opc, type) \
GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
#define GEN_LDXF(name, ldop, opc2, opc3, type) \
GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
#define GEN_LDFS(name, ldop, op, type) \
GEN_LDF(name, ldop, op | 0x20, type) \
GEN_LDUF(name, ldop, op | 0x21, type) \
GEN_LDUXF(name, ldop, op | 0x01, type) \
GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
8778
GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT)
8779
GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT)
8786
#define GEN_STF(name, stop, opc, type) \
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_STUF(name, stop, opc, type) \
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_STUXF(name, stop, opc, type) \
GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
#define GEN_STXF(name, stop, opc2, opc3, type) \
GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
#define GEN_STFS(name, stop, op, type) \
GEN_STF(name, stop, op | 0x20, type) \
GEN_STUF(name, stop, op | 0x21, type) \
GEN_STUXF(name, stop, op | 0x01, type) \
GEN_STXF(name, stop, 0x17, op | 0x00, type)
8800
GEN_STFS(stfd, st64, 0x16, PPC_FLOAT)
8801
GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT)
8802
GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX)
8805
#define GEN_CRLOGIC(name, tcg_op, opc) \
GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
8807
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
8808
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
8809
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
8810
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
8811
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
8812
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
8813
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
8814
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
8816
#undef GEN_MAC_HANDLER
#define GEN_MAC_HANDLER(name, opc2, opc3) \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
8819
GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
8820
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
8821
GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
8822
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
8823
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
8824
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
8825
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
8826
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
8827
GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
8828
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
8829
GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
8830
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
8831
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
8832
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
8833
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
8834
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
8835
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
8836
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
8837
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
8838
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
8839
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
8840
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
8841
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
8842
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
8843
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
8844
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
8845
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
8846
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
8847
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
8848
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
8849
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
8850
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
8851
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
8852
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
8853
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
8854
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
8855
GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
8856
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
8857
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
8858
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
8859
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
8860
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
8866
#define GEN_VR_LDX(name, opc2, opc3) \
GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
#define GEN_VR_STX(name, opc2, opc3) \
GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
#define GEN_VR_LVE(name, opc2, opc3) \
GEN_HANDLER(lve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
#define GEN_VR_STVE(name, opc2, opc3) \
GEN_HANDLER(stve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8874
GEN_VR_LDX(lvx, 0x07, 0x03),
8875
GEN_VR_LDX(lvxl, 0x07, 0x0B),
8876
GEN_VR_LVE(bx, 0x07, 0x00),
8877
GEN_VR_LVE(hx, 0x07, 0x01),
8878
GEN_VR_LVE(wx, 0x07, 0x02),
8879
GEN_VR_STX(svx, 0x07, 0x07),
8880
GEN_VR_STX(svxl, 0x07, 0x0F),
8881
GEN_VR_STVE(bx, 0x07, 0x04),
8882
GEN_VR_STVE(hx, 0x07, 0x05),
8883
GEN_VR_STVE(wx, 0x07, 0x06),
8885
#undef GEN_VX_LOGICAL
#define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
8888
GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16),
8889
GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17),
8890
GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18),
8891
GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19),
8892
GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20),
8895
#define GEN_VXFORM(name, opc2, opc3) \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
8897
GEN_VXFORM(vaddubm, 0, 0),
8898
GEN_VXFORM(vadduhm, 0, 1),
8899
GEN_VXFORM(vadduwm, 0, 2),
8900
GEN_VXFORM(vsububm, 0, 16),
8901
GEN_VXFORM(vsubuhm, 0, 17),
8902
GEN_VXFORM(vsubuwm, 0, 18),
8903
GEN_VXFORM(vmaxub, 1, 0),
8904
GEN_VXFORM(vmaxuh, 1, 1),
8905
GEN_VXFORM(vmaxuw, 1, 2),
8906
GEN_VXFORM(vmaxsb, 1, 4),
8907
GEN_VXFORM(vmaxsh, 1, 5),
8908
GEN_VXFORM(vmaxsw, 1, 6),
8909
GEN_VXFORM(vminub, 1, 8),
8910
GEN_VXFORM(vminuh, 1, 9),
8911
GEN_VXFORM(vminuw, 1, 10),
8912
GEN_VXFORM(vminsb, 1, 12),
8913
GEN_VXFORM(vminsh, 1, 13),
8914
GEN_VXFORM(vminsw, 1, 14),
8915
GEN_VXFORM(vavgub, 1, 16),
8916
GEN_VXFORM(vavguh, 1, 17),
8917
GEN_VXFORM(vavguw, 1, 18),
8918
GEN_VXFORM(vavgsb, 1, 20),
8919
GEN_VXFORM(vavgsh, 1, 21),
8920
GEN_VXFORM(vavgsw, 1, 22),
8921
GEN_VXFORM(vmrghb, 6, 0),
8922
GEN_VXFORM(vmrghh, 6, 1),
8923
GEN_VXFORM(vmrghw, 6, 2),
8924
GEN_VXFORM(vmrglb, 6, 4),
8925
GEN_VXFORM(vmrglh, 6, 5),
8926
GEN_VXFORM(vmrglw, 6, 6),
8927
GEN_VXFORM(vmuloub, 4, 0),
8928
GEN_VXFORM(vmulouh, 4, 1),
8929
GEN_VXFORM(vmulosb, 4, 4),
8930
GEN_VXFORM(vmulosh, 4, 5),
8931
GEN_VXFORM(vmuleub, 4, 8),
8932
GEN_VXFORM(vmuleuh, 4, 9),
8933
GEN_VXFORM(vmulesb, 4, 12),
8934
GEN_VXFORM(vmulesh, 4, 13),
8935
GEN_VXFORM(vslb, 2, 4),
8936
GEN_VXFORM(vslh, 2, 5),
8937
GEN_VXFORM(vslw, 2, 6),
8938
GEN_VXFORM(vsrb, 2, 8),
8939
GEN_VXFORM(vsrh, 2, 9),
8940
GEN_VXFORM(vsrw, 2, 10),
8941
GEN_VXFORM(vsrab, 2, 12),
8942
GEN_VXFORM(vsrah, 2, 13),
8943
GEN_VXFORM(vsraw, 2, 14),
8944
GEN_VXFORM(vslo, 6, 16),
8945
GEN_VXFORM(vsro, 6, 17),
8946
GEN_VXFORM(vaddcuw, 0, 6),
8947
GEN_VXFORM(vsubcuw, 0, 22),
8948
GEN_VXFORM(vaddubs, 0, 8),
8949
GEN_VXFORM(vadduhs, 0, 9),
8950
GEN_VXFORM(vadduws, 0, 10),
8951
GEN_VXFORM(vaddsbs, 0, 12),
8952
GEN_VXFORM(vaddshs, 0, 13),
8953
GEN_VXFORM(vaddsws, 0, 14),
8954
GEN_VXFORM(vsububs, 0, 24),
8955
GEN_VXFORM(vsubuhs, 0, 25),
8956
GEN_VXFORM(vsubuws, 0, 26),
8957
GEN_VXFORM(vsubsbs, 0, 28),
8958
GEN_VXFORM(vsubshs, 0, 29),
8959
GEN_VXFORM(vsubsws, 0, 30),
8960
GEN_VXFORM(vrlb, 2, 0),
8961
GEN_VXFORM(vrlh, 2, 1),
8962
GEN_VXFORM(vrlw, 2, 2),
8963
GEN_VXFORM(vsl, 2, 7),
8964
GEN_VXFORM(vsr, 2, 11),
8965
GEN_VXFORM(vpkuhum, 7, 0),
8966
GEN_VXFORM(vpkuwum, 7, 1),
8967
GEN_VXFORM(vpkuhus, 7, 2),
8968
GEN_VXFORM(vpkuwus, 7, 3),
8969
GEN_VXFORM(vpkshus, 7, 4),
8970
GEN_VXFORM(vpkswus, 7, 5),
8971
GEN_VXFORM(vpkshss, 7, 6),
8972
GEN_VXFORM(vpkswss, 7, 7),
8973
GEN_VXFORM(vpkpx, 7, 12),
8974
GEN_VXFORM(vsum4ubs, 4, 24),
8975
GEN_VXFORM(vsum4sbs, 4, 28),
8976
GEN_VXFORM(vsum4shs, 4, 25),
8977
GEN_VXFORM(vsum2sws, 4, 26),
8978
GEN_VXFORM(vsumsws, 4, 30),
8979
GEN_VXFORM(vaddfp, 5, 0),
8980
GEN_VXFORM(vsubfp, 5, 1),
8981
GEN_VXFORM(vmaxfp, 5, 16),
8982
GEN_VXFORM(vminfp, 5, 17),
8986
#define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
GEN_HANDLER2(name, str, 0x4, opc2, opc3, 0x00000000, PPC_ALTIVEC),
#define GEN_VXRFORM(name, opc2, opc3) \
GEN_VXRFORM1(name, name, #name, opc2, opc3) \
GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
8991
GEN_VXRFORM(vcmpequb, 3, 0)
8992
GEN_VXRFORM(vcmpequh, 3, 1)
8993
GEN_VXRFORM(vcmpequw, 3, 2)
8994
GEN_VXRFORM(vcmpgtsb, 3, 12)
8995
GEN_VXRFORM(vcmpgtsh, 3, 13)
8996
GEN_VXRFORM(vcmpgtsw, 3, 14)
8997
GEN_VXRFORM(vcmpgtub, 3, 8)
8998
GEN_VXRFORM(vcmpgtuh, 3, 9)
8999
GEN_VXRFORM(vcmpgtuw, 3, 10)
9000
GEN_VXRFORM(vcmpeqfp, 3, 3)
9001
GEN_VXRFORM(vcmpgefp, 3, 7)
9002
GEN_VXRFORM(vcmpgtfp, 3, 11)
9003
GEN_VXRFORM(vcmpbfp, 3, 15)
9005
#undef GEN_VXFORM_SIMM
#define GEN_VXFORM_SIMM(name, opc2, opc3) \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9008
GEN_VXFORM_SIMM(vspltisb, 6, 12),
9009
GEN_VXFORM_SIMM(vspltish, 6, 13),
9010
GEN_VXFORM_SIMM(vspltisw, 6, 14),
9012
#undef GEN_VXFORM_NOA
#define GEN_VXFORM_NOA(name, opc2, opc3) \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x001f0000, PPC_ALTIVEC)
9015
GEN_VXFORM_NOA(vupkhsb, 7, 8),
9016
GEN_VXFORM_NOA(vupkhsh, 7, 9),
9017
GEN_VXFORM_NOA(vupklsb, 7, 10),
9018
GEN_VXFORM_NOA(vupklsh, 7, 11),
9019
GEN_VXFORM_NOA(vupkhpx, 7, 13),
9020
GEN_VXFORM_NOA(vupklpx, 7, 15),
9021
GEN_VXFORM_NOA(vrefp, 5, 4),
9022
GEN_VXFORM_NOA(vrsqrtefp, 5, 5),
9023
GEN_VXFORM_NOA(vexptefp, 5, 6),
9024
GEN_VXFORM_NOA(vlogefp, 5, 7),
9025
GEN_VXFORM_NOA(vrfim, 5, 8),
9026
GEN_VXFORM_NOA(vrfin, 5, 9),
9027
GEN_VXFORM_NOA(vrfip, 5, 10),
9028
GEN_VXFORM_NOA(vrfiz, 5, 11),
9030
#undef GEN_VXFORM_UIMM
#define GEN_VXFORM_UIMM(name, opc2, opc3) \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9033
GEN_VXFORM_UIMM(vspltb, 6, 8),
9034
GEN_VXFORM_UIMM(vsplth, 6, 9),
9035
GEN_VXFORM_UIMM(vspltw, 6, 10),
9036
GEN_VXFORM_UIMM(vcfux, 5, 12),
9037
GEN_VXFORM_UIMM(vcfsx, 5, 13),
9038
GEN_VXFORM_UIMM(vctuxs, 5, 14),
9039
GEN_VXFORM_UIMM(vctsxs, 5, 15),
9041
#undef GEN_VAFORM_PAIRED
#define GEN_VAFORM_PAIRED(name0, name1, opc2) \
GEN_HANDLER(name0##_##name1, 0x04, opc2, 0xFF, 0x00000000, PPC_ALTIVEC)
9044
GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16),
9045
GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18),
9046
GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19),
9047
GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20),
9048
GEN_VAFORM_PAIRED(vsel, vperm, 21),
9049
GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23),
9052
#define GEN_SPE(name0, name1, opc2, opc3, inval, type) \
9053
GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type)
9054
GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE),
9055
GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE),
9056
GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE),
9057
GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE),
9058
GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE),
9059
GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE),
9060
GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE),
9061
GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE),
9062
GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, PPC_SPE),
9063
GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE),
9064
GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE),
9065
GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE),
9066
GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE),
9067
GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE),
9068
GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE),
9069
GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE),
9070
GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE),
9071
GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE),
9072
GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE),
9073
GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE),
9074
GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE),
9075
GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE),
9076
GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE),
9077
GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE),
9078
GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE),
9079
GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE),
9080
GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE),
9081
GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE),
9082
GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE),
9084
/* SPE vector single-precision FP entries (evfs*).  Extraction-artifact
 * number lines removed. */
GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPE_SINGLE),
GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPE_SINGLE),
GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPE_SINGLE),
/* SPE scalar single-precision FP entries (efs*).  Extraction-artifact
 * number lines removed. */
GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPE_SINGLE),
GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPE_SINGLE),
GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPE_SINGLE),
/* SPE scalar double-precision FP entries (efd*).  Extraction-artifact
 * number lines removed. */
GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPE_DOUBLE),
GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPE_DOUBLE),
GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPE_DOUBLE),
GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPE_DOUBLE),
GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
#undef GEN_SPEOP_LDST
9132
#define GEN_SPEOP_LDST(name, opc2, sh) \
9133
GEN_HANDLER(name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE)
9134
GEN_SPEOP_LDST(evldd, 0x00, 3),
9135
GEN_SPEOP_LDST(evldw, 0x01, 3),
9136
GEN_SPEOP_LDST(evldh, 0x02, 3),
9137
GEN_SPEOP_LDST(evlhhesplat, 0x04, 1),
9138
GEN_SPEOP_LDST(evlhhousplat, 0x06, 1),
9139
GEN_SPEOP_LDST(evlhhossplat, 0x07, 1),
9140
GEN_SPEOP_LDST(evlwhe, 0x08, 2),
9141
GEN_SPEOP_LDST(evlwhou, 0x0A, 2),
9142
GEN_SPEOP_LDST(evlwhos, 0x0B, 2),
9143
GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2),
9144
GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2),
9146
GEN_SPEOP_LDST(evstdd, 0x10, 3),
9147
GEN_SPEOP_LDST(evstdw, 0x11, 3),
9148
GEN_SPEOP_LDST(evstdh, 0x12, 3),
9149
GEN_SPEOP_LDST(evstwhe, 0x18, 2),
9150
GEN_SPEOP_LDST(evstwho, 0x1A, 2),
9151
GEN_SPEOP_LDST(evstwwe, 0x1C, 2),
9152
GEN_SPEOP_LDST(evstwwo, 0x1E, 2),
9155
#include "translate_init.c"
9156
#include "helper_regs.h"
9158
/*****************************************************************************/
9159
/* Misc PowerPC helpers */
9160
/*
 * NOTE(review): this region is damaged by extraction artifacts — the bare
 * numeric lines interleaved below are line numbers from the original file,
 * and the jumps in that numbering show that interior lines (locals, #else
 * branches, closing braces) are missing.  Code is left byte-identical;
 * only this comment was added.  Restore the missing lines from the
 * original source before attempting to compile.
 *
 * Purpose (from the visible code): dumps CPU state — NIP/LR/CTR/XER, MSR,
 * timebase/decrementer, the 32 GPRs and 32 FPRs, the CR fields, FPSCR and
 * assorted SPRs (SRRs, SPRGs, plus BookE and MMU-model-specific registers)
 * — to `f` through cpu_fprintf.
 */
void cpu_dump_state (CPUState *env, FILE *f, fprintf_function cpu_fprintf,
9168
cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
9169
TARGET_FMT_lx " XER " TARGET_FMT_lx "\n",
9170
env->nip, env->lr, env->ctr, env->xer);
9171
cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
9172
TARGET_FMT_lx " idx %d\n", env->msr, env->spr[SPR_HID0],
9173
env->hflags, env->mmu_idx);
9174
#if !defined(NO_TIMER_DUMP)
9175
cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
9176
#if !defined(CONFIG_USER_ONLY)
9180
cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
9181
#if !defined(CONFIG_USER_ONLY)
9182
, cpu_ppc_load_decr(env)
9186
for (i = 0; i < 32; i++) {
9187
if ((i & (RGPL - 1)) == 0)
9188
cpu_fprintf(f, "GPR%02d", i);
9189
cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
9190
if ((i & (RGPL - 1)) == (RGPL - 1))
9191
cpu_fprintf(f, "\n");
9193
cpu_fprintf(f, "CR ");
9194
for (i = 0; i < 8; i++)
9195
cpu_fprintf(f, "%01x", env->crf[i]);
9196
cpu_fprintf(f, " [");
9197
for (i = 0; i < 8; i++) {
9199
if (env->crf[i] & 0x08)
9201
else if (env->crf[i] & 0x04)
9203
else if (env->crf[i] & 0x02)
9205
cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
9207
cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
9209
for (i = 0; i < 32; i++) {
9210
if ((i & (RFPL - 1)) == 0)
9211
cpu_fprintf(f, "FPR%02d", i);
9212
cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
9213
if ((i & (RFPL - 1)) == (RFPL - 1))
9214
cpu_fprintf(f, "\n");
9216
cpu_fprintf(f, "FPSCR %08x\n", env->fpscr);
9217
#if !defined(CONFIG_USER_ONLY)
9218
cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx
9219
" PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n",
9220
env->spr[SPR_SRR0], env->spr[SPR_SRR1],
9221
env->spr[SPR_PVR], env->spr[SPR_VRSAVE]);
9223
cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx
9224
" SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n",
9225
env->spr[SPR_SPRG0], env->spr[SPR_SPRG1],
9226
env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]);
9228
cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx
9229
" SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n",
9230
env->spr[SPR_SPRG4], env->spr[SPR_SPRG5],
9231
env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]);
9233
if (env->excp_model == POWERPC_EXCP_BOOKE) {
9234
cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx
9235
" MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n",
9236
env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1],
9237
env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]);
9239
cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx
9240
" ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n",
9241
env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR],
9242
env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]);
9244
cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx
9245
" IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n",
9246
env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR],
9247
env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]);
9249
cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx
9250
" EPR " TARGET_FMT_lx "\n",
9251
env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8],
9252
env->spr[SPR_BOOKE_EPR]);
9255
cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx
9256
" PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n",
9257
env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1],
9258
env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]);
9261
* IVORs are left out as they are large and do not change often --
9262
* they can be read with "p $ivor0", "p $ivor1", etc.
9266
switch (env->mmu_model) {
9267
case POWERPC_MMU_32B:
9268
case POWERPC_MMU_601:
9269
case POWERPC_MMU_SOFT_6xx:
9270
case POWERPC_MMU_SOFT_74xx:
9271
#if defined(TARGET_PPC64)
9272
case POWERPC_MMU_620:
9273
case POWERPC_MMU_64B:
9275
cpu_fprintf(f, " SDR1 " TARGET_FMT_lx "\n", env->spr[SPR_SDR1]);
9277
case POWERPC_MMU_BOOKE206:
9278
cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx
9279
" MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n",
9280
env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1],
9281
env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]);
9283
cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx
9284
" MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n",
9285
env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6],
9286
env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]);
9288
cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx
9289
" TLB1CFG " TARGET_FMT_lx "\n",
9290
env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG],
9291
env->spr[SPR_BOOKE_TLB1CFG]);
9302
/*
 * NOTE(review): same extraction damage as above (interleaved bare line
 * numbers; interior lines and closing braces missing where the numbering
 * jumps).  Code left byte-identical; comments only.
 *
 * Purpose (from the visible code): when built with DO_PPC_STATISTICS,
 * walks the up-to-three-level opcode handler tables and prints, for every
 * handler whose `count` is non-zero, the opcode bytes, handler name and
 * execution count via cpu_fprintf.
 */
void cpu_dump_statistics (CPUState *env, FILE*f, fprintf_function cpu_fprintf,
9305
#if defined(DO_PPC_STATISTICS)
9306
opc_handler_t **t1, **t2, **t3, *handler;
9310
for (op1 = 0; op1 < 64; op1++) {
9312
if (is_indirect_opcode(handler)) {
9313
t2 = ind_table(handler);
9314
for (op2 = 0; op2 < 32; op2++) {
9316
if (is_indirect_opcode(handler)) {
9317
t3 = ind_table(handler);
9318
for (op3 = 0; op3 < 32; op3++) {
9320
if (handler->count == 0)
9322
cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
9323
"%016" PRIx64 " %" PRId64 "\n",
9324
op1, op2, op3, op1, (op3 << 5) | op2,
9326
handler->count, handler->count);
9329
if (handler->count == 0)
9331
cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
9332
"%016" PRIx64 " %" PRId64 "\n",
9333
op1, op2, op1, op2, handler->oname,
9334
handler->count, handler->count);
9338
if (handler->count == 0)
9340
cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
9342
op1, op1, handler->oname,
9343
handler->count, handler->count);
9349
/*****************************************************************************/
9350
/*
 * NOTE(review): same extraction damage as above — bare numeric lines are
 * original-file line numbers, and jumps in that numbering mark missing
 * interior lines (variable declarations, else-branches, closing braces).
 * Code left byte-identical; comments only.
 *
 * Purpose (from the visible code): the main per-TB translation loop.
 * It initializes a DisasContext from the CPU state (mem_idx, endianness,
 * FPU/SPE/AltiVec enables, single-step flags), then repeatedly fetches an
 * opcode at ctx.nip (byte-swapping in little-endian mode), resolves its
 * handler through up to three levels of opcode tables, validates it
 * against handler->inval, and calls the handler to emit TCG ops — until
 * an exception is raised, the op buffer fills, a page boundary is hit, or
 * single-stepping stops translation.  When `search_pc` is set it records
 * gen_opc_pc / gen_opc_instr_start / gen_opc_icount per instruction for
 * PC-restore mode.
 */
static inline void gen_intermediate_code_internal(CPUState *env,
9351
TranslationBlock *tb,
9354
DisasContext ctx, *ctxp = &ctx;
9355
opc_handler_t **table, *handler;
9356
target_ulong pc_start;
9357
uint16_t *gen_opc_end;
9364
gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
9367
ctx.exception = POWERPC_EXCP_NONE;
9368
ctx.spr_cb = env->spr_cb;
9369
ctx.mem_idx = env->mmu_idx;
9370
ctx.access_type = -1;
9371
ctx.le_mode = env->hflags & (1 << MSR_LE) ? 1 : 0;
9372
#if defined(TARGET_PPC64)
9373
ctx.sf_mode = msr_sf;
9375
ctx.fpu_enabled = msr_fp;
9376
if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
9377
ctx.spe_enabled = msr_spe;
9379
ctx.spe_enabled = 0;
9380
if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
9381
ctx.altivec_enabled = msr_vr;
9383
ctx.altivec_enabled = 0;
9384
if ((env->flags & POWERPC_FLAG_SE) && msr_se)
9385
ctx.singlestep_enabled = CPU_SINGLE_STEP;
9387
ctx.singlestep_enabled = 0;
9388
if ((env->flags & POWERPC_FLAG_BE) && msr_be)
9389
ctx.singlestep_enabled |= CPU_BRANCH_STEP;
9390
if (unlikely(env->singlestep_enabled))
9391
ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
9392
#if defined (DO_SINGLE_STEP) && 0
9393
/* Single step trace mode */
9397
max_insns = tb->cflags & CF_COUNT_MASK;
9399
max_insns = CF_COUNT_MASK;
9402
/* Set env in case of segfault during code fetch */
9403
while (ctx.exception == POWERPC_EXCP_NONE && gen_opc_ptr < gen_opc_end) {
9404
if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
9405
QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
9406
if (bp->pc == ctx.nip) {
9407
gen_debug_exception(ctxp);
9412
if (unlikely(search_pc)) {
9413
j = gen_opc_ptr - gen_opc_buf;
9417
gen_opc_instr_start[lj++] = 0;
9419
gen_opc_pc[lj] = ctx.nip;
9420
gen_opc_instr_start[lj] = 1;
9421
gen_opc_icount[lj] = num_insns;
9423
LOG_DISAS("----------------\n");
9424
LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
9425
ctx.nip, ctx.mem_idx, (int)msr_ir);
9426
if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
9428
if (unlikely(ctx.le_mode)) {
9429
ctx.opcode = bswap32(ldl_code(ctx.nip));
9431
ctx.opcode = ldl_code(ctx.nip);
9433
LOG_DISAS("translate opcode %08x (%02x %02x %02x) (%s)\n",
9434
ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
9435
opc3(ctx.opcode), little_endian ? "little" : "big");
9436
if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
9437
tcg_gen_debug_insn_start(ctx.nip);
9439
table = env->opcodes;
9441
handler = table[opc1(ctx.opcode)];
9442
if (is_indirect_opcode(handler)) {
9443
table = ind_table(handler);
9444
handler = table[opc2(ctx.opcode)];
9445
if (is_indirect_opcode(handler)) {
9446
table = ind_table(handler);
9447
handler = table[opc3(ctx.opcode)];
9450
/* Is opcode *REALLY* valid ? */
9451
if (unlikely(handler->handler == &gen_invalid)) {
9452
if (qemu_log_enabled()) {
9453
qemu_log("invalid/unsupported opcode: "
9454
"%02x - %02x - %02x (%08x) " TARGET_FMT_lx " %d\n",
9455
opc1(ctx.opcode), opc2(ctx.opcode),
9456
opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
9459
if (unlikely((ctx.opcode & handler->inval) != 0)) {
9460
if (qemu_log_enabled()) {
9461
qemu_log("invalid bits: %08x for opcode: "
9462
"%02x - %02x - %02x (%08x) " TARGET_FMT_lx "\n",
9463
ctx.opcode & handler->inval, opc1(ctx.opcode),
9464
opc2(ctx.opcode), opc3(ctx.opcode),
9465
ctx.opcode, ctx.nip - 4);
9467
gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL);
9471
(*(handler->handler))(&ctx);
9472
#if defined(DO_PPC_STATISTICS)
9475
/* Check trace mode exceptions */
9476
if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
9477
(ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
9478
ctx.exception != POWERPC_SYSCALL &&
9479
ctx.exception != POWERPC_EXCP_TRAP &&
9480
ctx.exception != POWERPC_EXCP_BRANCH)) {
9481
gen_exception(ctxp, POWERPC_EXCP_TRACE);
9482
} else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
9483
(env->singlestep_enabled) ||
9485
num_insns >= max_insns)) {
9486
/* if we reach a page boundary or are single stepping, stop
9492
if (tb->cflags & CF_LAST_IO)
9494
if (ctx.exception == POWERPC_EXCP_NONE) {
9495
gen_goto_tb(&ctx, 0, ctx.nip);
9496
} else if (ctx.exception != POWERPC_EXCP_BRANCH) {
9497
if (unlikely(env->singlestep_enabled)) {
9498
gen_debug_exception(ctxp);
9500
/* Generate the return instruction */
9503
gen_icount_end(tb, num_insns);
9504
*gen_opc_ptr = INDEX_op_end;
9505
if (unlikely(search_pc)) {
9506
j = gen_opc_ptr - gen_opc_buf;
9509
gen_opc_instr_start[lj++] = 0;
9511
tb->size = ctx.nip - pc_start;
9512
tb->icount = num_insns;
9514
#if defined(DEBUG_DISAS)
9515
if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
9517
flags = env->bfd_mach;
9518
flags |= ctx.le_mode << 16;
9519
qemu_log("IN: %s\n", lookup_symbol(pc_start));
9520
log_target_disas(pc_start, ctx.nip - pc_start, flags);
9526
/*
 * NOTE(review): the opening/closing braces of these two thin wrappers were
 * lost to extraction (bare numeric lines are original line numbers; the
 * jumps mark the missing brace lines).  Code left byte-identical.
 * Visible behavior: both delegate to gen_intermediate_code_internal, the
 * second passing search_pc = 1 (PC-restore mode), the first 0.
 */
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
9528
gen_intermediate_code_internal(env, tb, 0);
9531
void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
9533
gen_intermediate_code_internal(env, tb, 1);
9536
void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
9538
env->nip = gen_opc_pc[pc_pos];