 *  AArch64 translation
 *
 *  Copyright (c) 2013 Alexander Graf <agraf@suse.de>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "translate.h"
#include "qemu/host-utils.h"

#include "exec/gen-icount.h"

/* TCG globals mapping the guest CPU state: the 31 general purpose
 * X registers, the PC and the PSTATE flags word.
 */
static TCGv_i64 cpu_X[32];
static TCGv_i64 cpu_pc;
static TCGv_i32 pstate;

/* Register names used when creating the TCG globals; index 30 is the
 * link register and index 31 the stack pointer.
 */
static const char *regnames[] = {
    "x0", "x1", "x2", "x3", "x4", "x5", "x6", "x7",
    "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15",
    "x16", "x17", "x18", "x19", "x20", "x21", "x22", "x23",
    "x24", "x25", "x26", "x27", "x28", "x29", "lr", "sp"
};
48
/* initialize TCG globals. */
49
void a64_translate_init(void)
53
cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
54
offsetof(CPUARMState, pc),
56
for (i = 0; i < 32; i++) {
57
cpu_X[i] = tcg_global_mem_new_i64(TCG_AREG0,
58
offsetof(CPUARMState, xregs[i]),
62
pstate = tcg_global_mem_new_i32(TCG_AREG0,
63
offsetof(CPUARMState, pstate),
67
void aarch64_cpu_dump_state(CPUState *cs, FILE *f,
68
fprintf_function cpu_fprintf, int flags)
70
ARMCPU *cpu = ARM_CPU(cs);
71
CPUARMState *env = &cpu->env;
72
uint32_t psr = pstate_read(env);
75
cpu_fprintf(f, "PC=%016"PRIx64" SP=%016"PRIx64"\n",
76
env->pc, env->xregs[31]);
77
for (i = 0; i < 31; i++) {
78
cpu_fprintf(f, "X%02d=%016"PRIx64, i, env->xregs[i]);
85
cpu_fprintf(f, "PSTATE=%08x (flags %c%c%c%c)\n",
87
psr & PSTATE_N ? 'N' : '-',
88
psr & PSTATE_Z ? 'Z' : '-',
89
psr & PSTATE_C ? 'C' : '-',
90
psr & PSTATE_V ? 'V' : '-');
94
/* Emit TCG to load an immediate value into the guest PC. */
void gen_a64_set_pc_im(uint64_t val)
{
    tcg_gen_movi_i64(cpu_pc, val);
}
99
static void gen_exception(int excp)
101
TCGv_i32 tmp = tcg_temp_new_i32();
102
tcg_gen_movi_i32(tmp, excp);
103
gen_helper_exception(cpu_env, tmp);
104
tcg_temp_free_i32(tmp);
107
/* Raise exception @excp with the PC rewound by @offset bytes (back to
 * the faulting instruction), and terminate the TB.
 */
static void gen_exception_insn(DisasContext *s, int offset, int excp)
{
    gen_a64_set_pc_im(s->pc - offset);
    gen_exception(excp);
    s->is_jmp = DISAS_EXC;
}
114
/* Return true if direct TB chaining to @dest is permitted. */
static inline bool use_goto_tb(DisasContext *s, int n, uint64_t dest)
{
    /* No direct tb linking with singlestep or deterministic io */
    if (s->singlestep_enabled || (s->tb->cflags & CF_LAST_IO)) {
        return false;
    }

    /* Only link tbs from inside the same guest page */
    if ((s->tb->pc & TARGET_PAGE_MASK) != (dest & TARGET_PAGE_MASK)) {
        return false;
    }

    return true;
}
129
/* Jump to @dest, chaining TBs directly when use_goto_tb() allows it,
 * otherwise via an indirect exit (raising EXCP_DEBUG first when
 * single-stepping).
 */
static inline void gen_goto_tb(DisasContext *s, int n, uint64_t dest)
{
    TranslationBlock *tb;

    tb = s->tb;
    if (use_goto_tb(s, n, dest)) {
        tcg_gen_goto_tb(n);
        gen_a64_set_pc_im(dest);
        tcg_gen_exit_tb((tcg_target_long)tb + n);
        s->is_jmp = DISAS_TB_JUMP;
    } else {
        gen_a64_set_pc_im(dest);
        if (s->singlestep_enabled) {
            gen_exception(EXCP_DEBUG);
        }
        tcg_gen_exit_tb(0);
        s->is_jmp = DISAS_JUMP;
    }
}
149
/* Unallocated encodings are UNDEFINED: raise EXCP_UDEF at the insn
 * (offset 4 rewinds past the already-consumed instruction word).
 */
static void unallocated_encoding(DisasContext *s)
{
    gen_exception_insn(s, 4, EXCP_UDEF);
}
154
/* Log a not-yet-implemented encoding, then treat it as UNDEFINED so
 * guests get a well-defined (if wrong) behaviour instead of silence.
 */
#define unsupported_encoding(s, insn)                                    \
    do {                                                                 \
        qemu_log_mask(LOG_UNIMP,                                         \
                      "%s:%d: unsupported instruction encoding 0x%08x "  \
                      "at pc=%016" PRIx64 "\n",                          \
                      __FILE__, __LINE__, insn, s->pc - 4);              \
        unallocated_encoding(s);                                         \
    } while (0)
163
/* Reset the per-insn auto-freed temporary list; under CONFIG_DEBUG_TCG
 * also poison the slots so stale use is caught.
 */
static void init_tmp_a64_array(DisasContext *s)
{
#ifdef CONFIG_DEBUG_TCG
    int i;
    for (i = 0; i < ARRAY_SIZE(s->tmp_a64); i++) {
        TCGV_UNUSED_I64(s->tmp_a64[i]);
    }
#endif
    s->tmp_a64_count = 0;
}
174
/* Free all temporaries handed out by new_tmp_a64() since the last
 * reset, then re-initialize the list.
 */
static void free_tmp_a64(DisasContext *s)
{
    int i;
    for (i = 0; i < s->tmp_a64_count; i++) {
        tcg_temp_free_i64(s->tmp_a64[i]);
    }
    init_tmp_a64_array(s);
}
183
/* Allocate a new i64 temporary that is freed automatically at the end
 * of the current instruction (see free_tmp_a64()).
 */
static TCGv_i64 new_tmp_a64(DisasContext *s)
{
    assert(s->tmp_a64_count < TMP_A64_MAX);
    return s->tmp_a64[s->tmp_a64_count++] = tcg_temp_new_i64();
}
189
/* Allocate an auto-freed i64 temporary initialized to zero. */
static TCGv_i64 new_tmp_a64_zero(DisasContext *s)
{
    TCGv_i64 t = new_tmp_a64(s);
    tcg_gen_movi_i64(t, 0);
    return t;
}
196
/* Return the TCGv_i64 for general purpose register @reg.  Register 31
 * is treated as the zero register (ZR): reads as zero, writes are
 * discarded via an auto-freed scratch temporary.
 */
static TCGv_i64 cpu_reg(DisasContext *s, int reg)
{
    if (reg == 31) {
        return new_tmp_a64_zero(s);
    } else {
        return cpu_X[reg];
    }
}
205
/* read a cpu register in 32bit/64bit mode. Returns a TCGv_i64
206
* representing the register contents. This TCGv is an auto-freed
207
* temporary so it need not be explicitly freed, and may be modified.
209
static TCGv_i64 read_cpu_reg(DisasContext *s, int reg, int sf)
211
TCGv_i64 v = new_tmp_a64(s);
214
tcg_gen_mov_i64(v, cpu_X[reg]);
216
tcg_gen_ext32u_i64(v, cpu_X[reg]);
219
tcg_gen_movi_i64(v, 0);
225
* the instruction disassembly implemented here matches
226
* the instruction encoding classifications in chapter 3 (C3)
227
* of the ARM Architecture Reference Manual (DDI0487A_a)
230
/* C3.2.7 Unconditional branch (immediate)
232
* +----+-----------+-------------------------------------+
233
* | op | 0 0 1 0 1 | imm26 |
234
* +----+-----------+-------------------------------------+
236
static void disas_uncond_b_imm(DisasContext *s, uint32_t insn)
238
uint64_t addr = s->pc + sextract32(insn, 0, 26) * 4 - 4;
240
if (insn & (1 << 31)) {
241
/* C5.6.26 BL Branch with link */
242
tcg_gen_movi_i64(cpu_reg(s, 30), s->pc);
245
/* C5.6.20 B Branch / C5.6.26 BL Branch with link */
246
gen_goto_tb(s, 0, addr);
249
/* C3.2.1 Compare & branch (immediate)
250
* 31 30 25 24 23 5 4 0
251
* +----+-------------+----+---------------------+--------+
252
* | sf | 0 1 1 0 1 0 | op | imm19 | Rt |
253
* +----+-------------+----+---------------------+--------+
255
static void disas_comp_b_imm(DisasContext *s, uint32_t insn)
257
unsigned int sf, op, rt;
262
sf = extract32(insn, 31, 1);
263
op = extract32(insn, 24, 1); /* 0: CBZ; 1: CBNZ */
264
rt = extract32(insn, 0, 5);
265
addr = s->pc + sextract32(insn, 5, 19) * 4 - 4;
267
tcg_cmp = read_cpu_reg(s, rt, sf);
268
label_match = gen_new_label();
270
tcg_gen_brcondi_i64(op ? TCG_COND_NE : TCG_COND_EQ,
271
tcg_cmp, 0, label_match);
273
gen_goto_tb(s, 0, s->pc);
274
gen_set_label(label_match);
275
gen_goto_tb(s, 1, addr);
278
/* C3.2.5 Test & branch (immediate)
279
* 31 30 25 24 23 19 18 5 4 0
280
* +----+-------------+----+-------+-------------+------+
281
* | b5 | 0 1 1 0 1 1 | op | b40 | imm14 | Rt |
282
* +----+-------------+----+-------+-------------+------+
284
static void disas_test_b_imm(DisasContext *s, uint32_t insn)
286
unsigned int bit_pos, op, rt;
291
bit_pos = (extract32(insn, 31, 1) << 5) | extract32(insn, 19, 5);
292
op = extract32(insn, 24, 1); /* 0: TBZ; 1: TBNZ */
293
addr = s->pc + sextract32(insn, 5, 14) * 4 - 4;
294
rt = extract32(insn, 0, 5);
296
tcg_cmp = tcg_temp_new_i64();
297
tcg_gen_andi_i64(tcg_cmp, cpu_reg(s, rt), (1ULL << bit_pos));
298
label_match = gen_new_label();
299
tcg_gen_brcondi_i64(op ? TCG_COND_NE : TCG_COND_EQ,
300
tcg_cmp, 0, label_match);
301
tcg_temp_free_i64(tcg_cmp);
302
gen_goto_tb(s, 0, s->pc);
303
gen_set_label(label_match);
304
gen_goto_tb(s, 1, addr);
307
/* C3.2.2 / C5.6.19 Conditional branch (immediate)
308
* 31 25 24 23 5 4 3 0
309
* +---------------+----+---------------------+----+------+
310
* | 0 1 0 1 0 1 0 | o1 | imm19 | o0 | cond |
311
* +---------------+----+---------------------+----+------+
313
static void disas_cond_b_imm(DisasContext *s, uint32_t insn)
318
if ((insn & (1 << 4)) || (insn & (1 << 24))) {
319
unallocated_encoding(s);
322
addr = s->pc + sextract32(insn, 5, 19) * 4 - 4;
323
cond = extract32(insn, 0, 4);
326
/* genuinely conditional branches */
327
int label_match = gen_new_label();
328
arm_gen_test_cc(cond, label_match);
329
gen_goto_tb(s, 0, s->pc);
330
gen_set_label(label_match);
331
gen_goto_tb(s, 1, addr);
333
/* 0xe and 0xf are both "always" conditions */
334
gen_goto_tb(s, 0, addr);
339
/* C5.6.68 HINT (NOP, YIELD, WFE, WFI, SEV, SEVL) */
static void handle_hint(DisasContext *s, uint32_t insn,
                        unsigned int op1, unsigned int op2, unsigned int crm)
{
    unsigned int selector = crm << 3 | op2;

    if (op1 != 3) {
        unallocated_encoding(s);
        return;
    }

    switch (selector) {
    case 0: /* NOP */
        return;
    case 1: /* YIELD */
    case 2: /* WFE */
    case 3: /* WFI */
    case 4: /* SEV */
    case 5: /* SEVL */
        /* we treat all as NOP at least for now */
        return;
    default:
        /* default specified as NOP equivalent */
        return;
    }
}
365
/* CLREX, DSB, DMB, ISB */
366
static void handle_sync(DisasContext *s, uint32_t insn,
367
unsigned int op1, unsigned int op2, unsigned int crm)
370
unallocated_encoding(s);
376
unsupported_encoding(s, insn);
381
/* We don't emulate caches so barriers are no-ops */
384
unallocated_encoding(s);
389
/* C5.6.130 MSR (immediate) - move immediate to processor state field */
390
static void handle_msr_i(DisasContext *s, uint32_t insn,
391
unsigned int op1, unsigned int op2, unsigned int crm)
393
unsupported_encoding(s, insn);
397
static void handle_sys(DisasContext *s, uint32_t insn, unsigned int l,
398
unsigned int op1, unsigned int op2,
399
unsigned int crn, unsigned int crm, unsigned int rt)
401
unsupported_encoding(s, insn);
404
/* C5.6.129 MRS - move from system register */
405
static void handle_mrs(DisasContext *s, uint32_t insn, unsigned int op0,
406
unsigned int op1, unsigned int op2,
407
unsigned int crn, unsigned int crm, unsigned int rt)
409
unsupported_encoding(s, insn);
412
/* C5.6.131 MSR (register) - move to system register */
413
static void handle_msr(DisasContext *s, uint32_t insn, unsigned int op0,
414
unsigned int op1, unsigned int op2,
415
unsigned int crn, unsigned int crm, unsigned int rt)
417
unsupported_encoding(s, insn);
421
* 31 22 21 20 19 18 16 15 12 11 8 7 5 4 0
422
* +---------------------+---+-----+-----+-------+-------+-----+------+
423
* | 1 1 0 1 0 1 0 1 0 0 | L | op0 | op1 | CRn | CRm | op2 | Rt |
424
* +---------------------+---+-----+-----+-------+-------+-----+------+
426
static void disas_system(DisasContext *s, uint32_t insn)
428
unsigned int l, op0, op1, crn, crm, op2, rt;
429
l = extract32(insn, 21, 1);
430
op0 = extract32(insn, 19, 2);
431
op1 = extract32(insn, 16, 3);
432
crn = extract32(insn, 12, 4);
433
crm = extract32(insn, 8, 4);
434
op2 = extract32(insn, 5, 3);
435
rt = extract32(insn, 0, 5);
439
unallocated_encoding(s);
443
case 2: /* C5.6.68 HINT */
444
handle_hint(s, insn, op1, op2, crm);
446
case 3: /* CLREX, DSB, DMB, ISB */
447
handle_sync(s, insn, op1, op2, crm);
449
case 4: /* C5.6.130 MSR (immediate) */
450
handle_msr_i(s, insn, op1, op2, crm);
453
unallocated_encoding(s);
461
handle_sys(s, insn, l, op1, op2, crn, crm, rt);
462
} else if (l) { /* op0 > 1 */
463
/* C5.6.129 MRS - move from system register */
464
handle_mrs(s, insn, op0, op1, op2, crn, crm, rt);
466
/* C5.6.131 MSR (register) - move to system register */
467
handle_msr(s, insn, op0, op1, op2, crn, crm, rt);
471
/* Exception generation */
472
static void disas_exc(DisasContext *s, uint32_t insn)
474
unsupported_encoding(s, insn);
477
/* C3.2.7 Unconditional branch (register)
478
* 31 25 24 21 20 16 15 10 9 5 4 0
479
* +---------------+-------+-------+-------+------+-------+
480
* | 1 1 0 1 0 1 1 | opc | op2 | op3 | Rn | op4 |
481
* +---------------+-------+-------+-------+------+-------+
483
static void disas_uncond_b_reg(DisasContext *s, uint32_t insn)
485
unsigned int opc, op2, op3, rn, op4;
487
opc = extract32(insn, 21, 4);
488
op2 = extract32(insn, 16, 5);
489
op3 = extract32(insn, 10, 6);
490
rn = extract32(insn, 5, 5);
491
op4 = extract32(insn, 0, 5);
493
if (op4 != 0x0 || op3 != 0x0 || op2 != 0x1f) {
494
unallocated_encoding(s);
503
tcg_gen_movi_i64(cpu_reg(s, 30), s->pc);
508
unallocated_encoding(s);
510
unsupported_encoding(s, insn);
514
unallocated_encoding(s);
518
tcg_gen_mov_i64(cpu_pc, cpu_reg(s, rn));
519
s->is_jmp = DISAS_JUMP;
522
/* C3.2 Branches, exception generating and system instructions */
523
static void disas_b_exc_sys(DisasContext *s, uint32_t insn)
525
switch (extract32(insn, 25, 7)) {
526
case 0x0a: case 0x0b:
527
case 0x4a: case 0x4b: /* Unconditional branch (immediate) */
528
disas_uncond_b_imm(s, insn);
530
case 0x1a: case 0x5a: /* Compare & branch (immediate) */
531
disas_comp_b_imm(s, insn);
533
case 0x1b: case 0x5b: /* Test & branch (immediate) */
534
disas_test_b_imm(s, insn);
536
case 0x2a: /* Conditional branch (immediate) */
537
disas_cond_b_imm(s, insn);
539
case 0x6a: /* Exception generation / System */
540
if (insn & (1 << 24)) {
541
disas_system(s, insn);
546
case 0x6b: /* Unconditional branch (register) */
547
disas_uncond_b_reg(s, insn);
550
unallocated_encoding(s);
555
/* Load/store exclusive */
556
static void disas_ldst_excl(DisasContext *s, uint32_t insn)
558
unsupported_encoding(s, insn);
561
/* Load register (literal) */
562
static void disas_ld_lit(DisasContext *s, uint32_t insn)
564
unsupported_encoding(s, insn);
567
/* Load/store pair (all forms) */
568
static void disas_ldst_pair(DisasContext *s, uint32_t insn)
570
unsupported_encoding(s, insn);
573
/* Load/store register (all forms) */
574
static void disas_ldst_reg(DisasContext *s, uint32_t insn)
576
unsupported_encoding(s, insn);
579
/* AdvSIMD load/store multiple structures */
580
static void disas_ldst_multiple_struct(DisasContext *s, uint32_t insn)
582
unsupported_encoding(s, insn);
585
/* AdvSIMD load/store single structure */
586
static void disas_ldst_single_struct(DisasContext *s, uint32_t insn)
588
unsupported_encoding(s, insn);
591
/* C3.3 Loads and stores */
592
static void disas_ldst(DisasContext *s, uint32_t insn)
594
switch (extract32(insn, 24, 6)) {
595
case 0x08: /* Load/store exclusive */
596
disas_ldst_excl(s, insn);
598
case 0x18: case 0x1c: /* Load register (literal) */
599
disas_ld_lit(s, insn);
601
case 0x28: case 0x29:
602
case 0x2c: case 0x2d: /* Load/store pair (all forms) */
603
disas_ldst_pair(s, insn);
605
case 0x38: case 0x39:
606
case 0x3c: case 0x3d: /* Load/store register (all forms) */
607
disas_ldst_reg(s, insn);
609
case 0x0c: /* AdvSIMD load/store multiple structures */
610
disas_ldst_multiple_struct(s, insn);
612
case 0x0d: /* AdvSIMD load/store single structure */
613
disas_ldst_single_struct(s, insn);
616
unallocated_encoding(s);
621
/* PC-rel. addressing */
622
static void disas_pc_rel_adr(DisasContext *s, uint32_t insn)
624
unsupported_encoding(s, insn);
627
/* Add/subtract (immediate) */
628
static void disas_add_sub_imm(DisasContext *s, uint32_t insn)
630
unsupported_encoding(s, insn);
633
/* Logical (immediate) */
634
static void disas_logic_imm(DisasContext *s, uint32_t insn)
636
unsupported_encoding(s, insn);
639
/* Move wide (immediate) */
640
static void disas_movw_imm(DisasContext *s, uint32_t insn)
642
unsupported_encoding(s, insn);
646
static void disas_bitfield(DisasContext *s, uint32_t insn)
648
unsupported_encoding(s, insn);
652
static void disas_extract(DisasContext *s, uint32_t insn)
654
unsupported_encoding(s, insn);
657
/* C3.4 Data processing - immediate */
658
static void disas_data_proc_imm(DisasContext *s, uint32_t insn)
660
switch (extract32(insn, 23, 6)) {
661
case 0x20: case 0x21: /* PC-rel. addressing */
662
disas_pc_rel_adr(s, insn);
664
case 0x22: case 0x23: /* Add/subtract (immediate) */
665
disas_add_sub_imm(s, insn);
667
case 0x24: /* Logical (immediate) */
668
disas_logic_imm(s, insn);
670
case 0x25: /* Move wide (immediate) */
671
disas_movw_imm(s, insn);
673
case 0x26: /* Bitfield */
674
disas_bitfield(s, insn);
676
case 0x27: /* Extract */
677
disas_extract(s, insn);
680
unallocated_encoding(s);
685
/* Logical (shifted register) */
686
static void disas_logic_reg(DisasContext *s, uint32_t insn)
688
unsupported_encoding(s, insn);
691
/* Add/subtract (extended register) */
692
static void disas_add_sub_ext_reg(DisasContext *s, uint32_t insn)
694
unsupported_encoding(s, insn);
697
/* Add/subtract (shifted register) */
698
static void disas_add_sub_reg(DisasContext *s, uint32_t insn)
700
unsupported_encoding(s, insn);
703
/* Data-processing (3 source) */
704
static void disas_data_proc_3src(DisasContext *s, uint32_t insn)
706
unsupported_encoding(s, insn);
709
/* Add/subtract (with carry) */
710
static void disas_adc_sbc(DisasContext *s, uint32_t insn)
712
unsupported_encoding(s, insn);
715
/* Conditional compare (immediate) */
716
static void disas_cc_imm(DisasContext *s, uint32_t insn)
718
unsupported_encoding(s, insn);
721
/* Conditional compare (register) */
722
static void disas_cc_reg(DisasContext *s, uint32_t insn)
724
unsupported_encoding(s, insn);
727
/* C3.5.6 Conditional select
728
* 31 30 29 28 21 20 16 15 12 11 10 9 5 4 0
729
* +----+----+---+-----------------+------+------+-----+------+------+
730
* | sf | op | S | 1 1 0 1 0 1 0 0 | Rm | cond | op2 | Rn | Rd |
731
* +----+----+---+-----------------+------+------+-----+------+------+
733
static void disas_cond_select(DisasContext *s, uint32_t insn)
735
unsigned int sf, else_inv, rm, cond, else_inc, rn, rd;
736
TCGv_i64 tcg_rd, tcg_src;
738
if (extract32(insn, 29, 1) || extract32(insn, 11, 1)) {
739
/* S == 1 or op2<1> == 1 */
740
unallocated_encoding(s);
743
sf = extract32(insn, 31, 1);
744
else_inv = extract32(insn, 30, 1);
745
rm = extract32(insn, 16, 5);
746
cond = extract32(insn, 12, 4);
747
else_inc = extract32(insn, 10, 1);
748
rn = extract32(insn, 5, 5);
749
rd = extract32(insn, 0, 5);
752
/* silly no-op write; until we use movcond we must special-case
753
* this to avoid a dead temporary across basic blocks.
758
tcg_rd = cpu_reg(s, rd);
760
if (cond >= 0x0e) { /* condition "always" */
761
tcg_src = read_cpu_reg(s, rn, sf);
762
tcg_gen_mov_i64(tcg_rd, tcg_src);
764
/* OPTME: we could use movcond here, at the cost of duplicating
765
* a lot of the arm_gen_test_cc() logic.
767
int label_match = gen_new_label();
768
int label_continue = gen_new_label();
770
arm_gen_test_cc(cond, label_match);
772
tcg_src = cpu_reg(s, rm);
774
if (else_inv && else_inc) {
775
tcg_gen_neg_i64(tcg_rd, tcg_src);
776
} else if (else_inv) {
777
tcg_gen_not_i64(tcg_rd, tcg_src);
778
} else if (else_inc) {
779
tcg_gen_addi_i64(tcg_rd, tcg_src, 1);
781
tcg_gen_mov_i64(tcg_rd, tcg_src);
784
tcg_gen_ext32u_i64(tcg_rd, tcg_rd);
786
tcg_gen_br(label_continue);
788
gen_set_label(label_match);
789
tcg_src = read_cpu_reg(s, rn, sf);
790
tcg_gen_mov_i64(tcg_rd, tcg_src);
792
gen_set_label(label_continue);
796
/* Data-processing (1 source) */
797
static void disas_data_proc_1src(DisasContext *s, uint32_t insn)
799
unsupported_encoding(s, insn);
802
/* Data-processing (2 source) */
803
static void disas_data_proc_2src(DisasContext *s, uint32_t insn)
805
unsupported_encoding(s, insn);
808
/* C3.5 Data processing - register */
809
static void disas_data_proc_reg(DisasContext *s, uint32_t insn)
811
switch (extract32(insn, 24, 5)) {
812
case 0x0a: /* Logical (shifted register) */
813
disas_logic_reg(s, insn);
815
case 0x0b: /* Add/subtract */
816
if (insn & (1 << 21)) { /* (extended register) */
817
disas_add_sub_ext_reg(s, insn);
819
disas_add_sub_reg(s, insn);
822
case 0x1b: /* Data-processing (3 source) */
823
disas_data_proc_3src(s, insn);
826
switch (extract32(insn, 21, 3)) {
827
case 0x0: /* Add/subtract (with carry) */
828
disas_adc_sbc(s, insn);
830
case 0x2: /* Conditional compare */
831
if (insn & (1 << 11)) { /* (immediate) */
832
disas_cc_imm(s, insn);
833
} else { /* (register) */
834
disas_cc_reg(s, insn);
837
case 0x4: /* Conditional select */
838
disas_cond_select(s, insn);
840
case 0x6: /* Data-processing */
841
if (insn & (1 << 30)) { /* (1 source) */
842
disas_data_proc_1src(s, insn);
843
} else { /* (2 source) */
844
disas_data_proc_2src(s, insn);
848
unallocated_encoding(s);
853
unallocated_encoding(s);
858
/* C3.6 Data processing - SIMD and floating point */
859
static void disas_data_proc_simd_fp(DisasContext *s, uint32_t insn)
861
unsupported_encoding(s, insn);
864
/* C3.1 A64 instruction index by encoding */
865
static void disas_a64_insn(CPUARMState *env, DisasContext *s)
869
insn = arm_ldl_code(env, s->pc, s->bswap_code);
873
switch (extract32(insn, 25, 4)) {
874
case 0x0: case 0x1: case 0x2: case 0x3: /* UNALLOCATED */
875
unallocated_encoding(s);
877
case 0x8: case 0x9: /* Data processing - immediate */
878
disas_data_proc_imm(s, insn);
880
case 0xa: case 0xb: /* Branch, exception generation and system insns */
881
disas_b_exc_sys(s, insn);
886
case 0xe: /* Loads and stores */
890
case 0xd: /* Data processing - register */
891
disas_data_proc_reg(s, insn);
894
case 0xf: /* Data processing - SIMD and floating point */
895
disas_data_proc_simd_fp(s, insn);
898
assert(FALSE); /* all 15 cases should be handled above */
902
/* if we allocated any temporaries, free them here */
906
void gen_intermediate_code_internal_a64(ARMCPU *cpu,
907
TranslationBlock *tb,
910
CPUState *cs = CPU(cpu);
911
CPUARMState *env = &cpu->env;
912
DisasContext dc1, *dc = &dc1;
914
uint16_t *gen_opc_end;
916
target_ulong pc_start;
917
target_ulong next_page_start;
925
gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
927
dc->is_jmp = DISAS_NEXT;
929
dc->singlestep_enabled = cs->singlestep_enabled;
935
dc->condexec_mask = 0;
936
dc->condexec_cond = 0;
937
#if !defined(CONFIG_USER_ONLY)
944
init_tmp_a64_array(dc);
946
next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
949
max_insns = tb->cflags & CF_COUNT_MASK;
950
if (max_insns == 0) {
951
max_insns = CF_COUNT_MASK;
956
tcg_clear_temp_count();
959
if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
960
QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
961
if (bp->pc == dc->pc) {
962
gen_exception_insn(dc, 0, EXCP_DEBUG);
963
/* Advance PC so that clearing the breakpoint will
964
invalidate this TB. */
966
goto done_generating;
972
j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
976
tcg_ctx.gen_opc_instr_start[lj++] = 0;
979
tcg_ctx.gen_opc_pc[lj] = dc->pc;
980
tcg_ctx.gen_opc_instr_start[lj] = 1;
981
tcg_ctx.gen_opc_icount[lj] = num_insns;
984
if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO)) {
988
if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
989
tcg_gen_debug_insn_start(dc->pc);
992
disas_a64_insn(env, dc);
994
if (tcg_check_temp_count()) {
995
fprintf(stderr, "TCG temporary leak before "TARGET_FMT_lx"\n",
999
/* Translation stops when a conditional branch is encountered.
1000
* Otherwise the subsequent code could get translated several times.
1001
* Also stop translation when a page boundary is reached. This
1002
* ensures prefetch aborts occur at the right place.
1005
} while (!dc->is_jmp && tcg_ctx.gen_opc_ptr < gen_opc_end &&
1006
!cs->singlestep_enabled &&
1008
dc->pc < next_page_start &&
1009
num_insns < max_insns);
1011
if (tb->cflags & CF_LAST_IO) {
1015
if (unlikely(cs->singlestep_enabled) && dc->is_jmp != DISAS_EXC) {
1016
/* Note that this means single stepping WFI doesn't halt the CPU.
1017
* For conditional branch insns this is harmless unreachable code as
1018
* gen_goto_tb() has already handled emitting the debug exception
1019
* (and thus a tb-jump is not possible when singlestepping).
1021
assert(dc->is_jmp != DISAS_TB_JUMP);
1022
if (dc->is_jmp != DISAS_JUMP) {
1023
gen_a64_set_pc_im(dc->pc);
1025
gen_exception(EXCP_DEBUG);
1027
switch (dc->is_jmp) {
1029
gen_goto_tb(dc, 1, dc->pc);
1034
/* indicate that the hash table must be used to find the next TB */
1042
/* This is a special case because we don't want to just halt the CPU
1043
* if trying to debug across a WFI.
1045
gen_helper_wfi(cpu_env);
1051
gen_tb_end(tb, num_insns);
1052
*tcg_ctx.gen_opc_ptr = INDEX_op_end;
1055
if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
1056
qemu_log("----------------\n");
1057
qemu_log("IN: %s\n", lookup_symbol(pc_start));
1058
log_target_disas(env, pc_start, dc->pc - pc_start,
1059
dc->thumb | (dc->bswap_code << 1));
1064
j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
1067
tcg_ctx.gen_opc_instr_start[lj++] = 0;
1070
tb->size = dc->pc - pc_start;
1071
tb->icount = num_insns;