1
/*** VSX extension ***/
3
static inline TCGv_i64 cpu_vsrh(int n)
12
static inline TCGv_i64 cpu_vsrl(int n)
17
return cpu_avrl[n-32];
21
/*
 * Generate a VSX scalar load: loads into the high doubleword of xT
 * using the given gen_qemu_* load operation.
 */
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
}
37
VSX_LOAD_SCALAR(lxsdx, ld64_i64)
38
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
39
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
40
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
41
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
42
VSX_LOAD_SCALAR(lxsspx, ld32fs)
44
/* lxvd2x: load two doublewords into xT (high then low). */
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}
60
/* lxvdsx: load one doubleword and splat it into both halves of xT. */
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}
75
/*
 * lxvw4x: load four words into xT.
 * In little-endian mode each 64-bit load gets its two words swapped
 * back into element order via shift+deposit.
 */
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}
109
/* Byte-swap each of the eight 16-bit halfwords in the 128-bit pair. */
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}
135
/*
 * Byte-swap each of the four 32-bit words in the 128-bit pair:
 * a full 64-bit bswap swaps words as well as bytes, so swap the
 * two word halves back with shift+deposit.
 */
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}
151
/* lxvh8x: load eight halfwords; swap to element order in LE mode. */
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    tcg_temp_free(EA);
}
174
/* lxvb16x: load sixteen bytes; byte order is endian-independent. */
static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}
193
/*
 * Generate a VSX scalar store: stores the high doubleword of xS
 * using the given gen_qemu_* store operation.
 */
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA);                                        \
}
208
VSX_STORE_SCALAR(stxsdx, st64_i64)
210
VSX_STORE_SCALAR(stxsibx, st8_i64)
211
VSX_STORE_SCALAR(stxsihx, st16_i64)
212
VSX_STORE_SCALAR(stxsiwx, st32_i64)
213
VSX_STORE_SCALAR(stxsspx, st32fs)
215
/* stxvd2x: store the two doublewords of xS (high then low). */
static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}
231
/*
 * stxvw4x: store the four words of xS.
 * In little-endian mode the two words of each doubleword are swapped
 * before the 64-bit store so memory ends up in element order.
 */
static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}
264
/* stxvh8x: store eight halfwords; swap halfwords first in LE mode. */
static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}
295
/* stxvb16x: store sixteen bytes; byte order is endian-independent. */
static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}
314
/*
 * Move word between a GPR and a VSR with the given extension ops.
 * VSRs 0-31 require the FPU; VSRs 32-63 require Altivec.
 */
#define MV_VSRW(name, tcgop1, tcgop2, target, source)           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    TCGv_i64 tmp = tcg_temp_new_i64();                          \
    tcg_gen_##tcgop1(tmp, source);                              \
    tcg_gen_##tcgop2(target, tmp);                              \
    tcg_temp_free_i64(tmp);                                     \
}
335
MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
342
#if defined(TARGET_PPC64)
/*
 * Move a doubleword between a GPR and a VSR high doubleword.
 * VSRs 0-31 require the FPU; VSRs 32-63 require Altivec.
 */
#define MV_VSRD(name, target, source)             \
static void gen_##name(DisasContext *ctx)         \
{                                                 \
    if (xS(ctx->opcode) < 32) {                   \
        if (unlikely(!ctx->fpu_enabled)) {        \
            gen_exception(ctx, POWERPC_EXCP_FPU); \
            return;                               \
        }                                         \
    } else {                                      \
        if (unlikely(!ctx->altivec_enabled)) {    \
            gen_exception(ctx, POWERPC_EXCP_VPU); \
            return;                               \
        }                                         \
    }                                             \
    tcg_gen_mov_i64(target, source);              \
}
360
MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])
363
/* mfvsrld: move the low doubleword of VSR[xS] to GPR[rA]. */
static void gen_mfvsrld(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], cpu_vsrl(xS(ctx->opcode)));
}
380
/*
 * mtvsrdd: VSR[xT] = GPR[rA] || GPR[rB].
 * rA == 0 means a literal zero for the high doubleword (not GPR 0).
 */
static void gen_mtvsrdd(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0);
    } else {
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)]);
    }

    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rB(ctx->opcode)]);
}
403
/* mtvsrws: splat the low word of GPR[rA] into all four words of VSR[xT]. */
static void gen_mtvsrws(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xT(ctx->opcode)));
}
424
/*
 * xxpermdi: select doublewords of xA/xB per the DM field into xT.
 * When xT aliases a source register, go through temporaries so the
 * second select does not read an already-overwritten doubleword.
 */
static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}
472
/* Sign-bit masks for double-precision and paired single-precision lanes. */
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull
475
/*
 * Scalar sign-bit manipulation on the high doubleword of xB:
 * OP_ABS clears the sign, OP_NABS sets it, OP_NEG flips it,
 * OP_CPSGN copies it from xA. Result goes to the high dword of xT.
 */
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext * ctx)                  \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode)));           \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode)));   \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb);           \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }
515
VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
516
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
517
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
518
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)
520
/*
 * Vector sign-bit manipulation on both doublewords of xB (DP lanes or
 * the SP lane pair selected by sgn_mask); result goes to xT.
 */
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext * ctx)                 \
    {                                                            \
        TCGv_i64 xbh, xbl, sgm;                                  \
        if (unlikely(!ctx->vsx_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VSXU);               \
            return;                                              \
        }                                                        \
        xbh = tcg_temp_new_i64();                                \
        xbl = tcg_temp_new_i64();                                \
        sgm = tcg_temp_new_i64();                                \
        tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode)));         \
        tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode)));         \
        tcg_gen_movi_i64(sgm, sgn_mask);                         \
        switch (op) {                                            \
            case OP_ABS: {                                       \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                break;                                           \
            }                                                    \
            case OP_NABS: {                                      \
                tcg_gen_or_i64(xbh, xbh, sgm);                   \
                tcg_gen_or_i64(xbl, xbl, sgm);                   \
                break;                                           \
            }                                                    \
            case OP_NEG: {                                       \
                tcg_gen_xor_i64(xbh, xbh, sgm);                  \
                tcg_gen_xor_i64(xbl, xbl, sgm);                  \
                break;                                           \
            }                                                    \
            case OP_CPSGN: {                                     \
                TCGv_i64 xah = tcg_temp_new_i64();               \
                TCGv_i64 xal = tcg_temp_new_i64();               \
                tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode))); \
                tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode))); \
                tcg_gen_and_i64(xah, xah, sgm);                  \
                tcg_gen_and_i64(xal, xal, sgm);                  \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                tcg_gen_or_i64(xbh, xbh, xah);                   \
                tcg_gen_or_i64(xbl, xbl, xal);                   \
                tcg_temp_free_i64(xah);                          \
                tcg_temp_free_i64(xal);                          \
                break;                                           \
            }                                                    \
        }                                                        \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh);         \
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl);         \
        tcg_temp_free_i64(xbh);                                  \
        tcg_temp_free_i64(xbl);                                  \
        tcg_temp_free_i64(sgm);                                  \
    }
573
VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
574
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
575
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
576
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
577
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
578
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
579
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
580
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
582
/* Emit a call to the out-of-line helper, passing the raw opcode. */
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext * ctx)                                    \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}
595
/* Helper call taking the high dword of xB and writing the high dword of xT. */
#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext * ctx)                    \
{                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env,     \
                      cpu_vsrh(xB(ctx->opcode)));             \
}
606
/* Scalar double-precision operations. */
GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
648
/* Scalar single-precision operations (VSX 2.07). */
GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
666
/* Vector double-precision operations. */
GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)
704
/* Vector single-precision operations. */
GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
742
/* xxbrd: byte-reverse each doubleword of xB into xT. */
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
}
757
/* xxbrh: byte-reverse each halfword of xB into xT. */
static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap16x8(xth, xtl, xbh, xbl);
}
771
/*
 * xxbrq: byte-reverse the full quadword of xB into xT.
 * Swap the two bswapped doublewords via a temporary so xT may alias xB.
 */
static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    tcg_gen_mov_i64(xth, t0);
    tcg_temp_free_i64(t0);
}
789
/* xxbrw: byte-reverse each word of xB into xT. */
static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap32x4(xth, xtl, xbh, xbl);
}
803
/* Apply a 64-bit logical TCG op independently to both halves of the VSRs. */
#define VSX_LOGICAL(name, tcg_op)                                    \
static void glue(gen_, name)(DisasContext * ctx)                     \
    {                                                                \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
        tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
               cpu_vsrh(xB(ctx->opcode)));                           \
        tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
               cpu_vsrl(xB(ctx->opcode)));                           \
    }
816
VSX_LOGICAL(xxland, tcg_gen_and_i64)
817
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
818
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
819
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
820
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
821
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
822
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
823
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)
825
/*
 * Merge words: interleave the word elements of the high (high != 0)
 * or low halves of xA and xB into xT.
 */
#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext * ctx)            \
    {                                                       \
        TCGv_i64 a0, a1, b0, b1;                            \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
        a0 = tcg_temp_new_i64();                            \
        a1 = tcg_temp_new_i64();                            \
        b0 = tcg_temp_new_i64();                            \
        b1 = tcg_temp_new_i64();                            \
        if (high) {                                         \
            tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode))); \
            tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode))); \
            tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode))); \
            tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode))); \
        } else {                                            \
            tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode))); \
            tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode))); \
            tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode))); \
            tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode))); \
        }                                                   \
        tcg_gen_shri_i64(a0, a0, 32);                       \
        tcg_gen_shri_i64(b0, b0, 32);                       \
        tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)),      \
                            b0, a0, 32, 32);                \
        tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)),      \
                            b1, a1, 32, 32);                \
        tcg_temp_free_i64(a0);                              \
        tcg_temp_free_i64(a1);                              \
        tcg_temp_free_i64(b0);                              \
        tcg_temp_free_i64(b1);                              \
    }
860
VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)
863
/* xxsel: bitwise select, xT = (xA & ~xC) | (xB & xC), per doubleword. */
static void gen_xxsel(DisasContext * ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}
895
/* xxspltw: splat the word of xB selected by UIM into all words of xT. */
static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}
924
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))
926
static void gen_xxspltib(DisasContext *ctx)
928
unsigned char uim8 = IMM8(ctx->opcode);
929
if (xS(ctx->opcode) < 32) {
930
if (unlikely(!ctx->altivec_enabled)) {
931
gen_exception(ctx, POWERPC_EXCP_VPU);
935
if (unlikely(!ctx->vsx_enabled)) {
936
gen_exception(ctx, POWERPC_EXCP_VSXU);
940
tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), pattern(uim8));
941
tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), pattern(uim8));
944
/*
 * xxsldwi: shift the concatenation xA || xB left by SHW words and
 * take the high quadword. SHW 0/2 are doubleword-aligned moves;
 * SHW 1/3 combine adjacent doublewords with a 32-bit shift.
 */
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
1007
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM