if (bit (inst2, 12) == 0)
nextpc = nextpc & 0xfffffffc;
if (!skip_prologue_function (gdbarch, nextpc,
bit (inst2, 12) != 0))
else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!}, { registers } */
&& pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
pv_t addr = regs[bits (insn, 0, 3)];
if (pv_area_store_would_trash (stack, addr))
/* Calculate offsets of saved registers. */
for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
if (inst2 & (1 << regno))
addr = pv_add_constant (addr, -4);
pv_area_store (stack, addr, 4, regs[regno]);
regs[bits (insn, 0, 3)] = addr;
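/* For reference: the bit () and bits () helpers used throughout this
   scanner are plain shift-and-mask accessors. A minimal sketch that is
   consistent with every call site here (arm-tdep.c defines equivalents):  */

#define submask(x) ((1L << ((x) + 1)) - 1)
#define bit(obj,st) (((obj) >> (st)) & 1)
#define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))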
else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2, [Rn, #+/-imm]{!} */
&& pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
int regno1 = bits (inst2, 12, 15);
int regno2 = bits (inst2, 8, 11);
pv_t addr = regs[bits (insn, 0, 3)];
offset = inst2 & 0xff;
addr = pv_add_constant (addr, offset);
addr = pv_add_constant (addr, -offset);
if (pv_area_store_would_trash (stack, addr))
pv_area_store (stack, addr, 4, regs[regno1]);
pv_area_store (stack, pv_add_constant (addr, 4),
4, regs[regno2]);
regs[bits (insn, 0, 3)] = addr;
else if ((insn & 0xfff0) == 0xf840 /* str Rt,[Rn,#+/-imm]{!} */
&& (inst2 & 0x0c00) == 0x0c00
&& pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
int regno = bits (inst2, 12, 15);
pv_t addr = regs[bits (insn, 0, 3)];
offset = inst2 & 0xff;
addr = pv_add_constant (addr, offset);
addr = pv_add_constant (addr, -offset);
if (pv_area_store_would_trash (stack, addr))
pv_area_store (stack, addr, 4, regs[regno]);
regs[bits (insn, 0, 3)] = addr;
else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
&& pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
int regno = bits (inst2, 12, 15);
offset = inst2 & 0xfff;
addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
if (pv_area_store_would_trash (stack, addr))
pv_area_store (stack, addr, 4, regs[regno]);
else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
&& pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
/* Ignore stores of argument registers to the stack. */
else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
&& (inst2 & 0x0d00) == 0x0c00
&& pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
/* Ignore stores of argument registers to the stack. */
else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!], { registers } */
&& (inst2 & 0x8000) == 0x0000
&& pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
/* Ignore block loads from the stack, potentially copying
parameters from memory. */
else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2, [Rn, #+/-imm] */
&& pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
/* Similarly ignore dual loads from the stack. */
else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
&& (inst2 & 0x0d00) == 0x0c00
&& pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
/* Similarly ignore single loads from the stack. */
else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
&& pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
/* Similarly ignore single loads from the stack. */
else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
&& (inst2 & 0x8000) == 0x0000)
unsigned int imm = ((bits (insn, 10, 10) << 11)
| (bits (inst2, 12, 14) << 8)
| bits (inst2, 0, 7));
regs[bits (inst2, 8, 11)]
= pv_add_constant (regs[bits (insn, 0, 3)],
thumb_expand_immediate (imm));
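/* Sketch of the T32 "modified immediate" expansion assumed by the
   thumb_expand_immediate calls above. IMM is the 12-bit i:imm3:imm8
   value assembled from the two halfwords; the top bits select either a
   byte-replication pattern or a rotated 8-bit constant. This follows
   the architectural ThumbExpandImm rules; treat it as an illustrative
   re-derivation rather than the exact routine in this file.  */
static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;	/* Rotation/selector field.  */

  if (count < 8)
    switch (count / 2)
      {
      case 0:	/* 00000000 00000000 00000000 abcdefgh  */
	return imm & 0xff;
      case 1:	/* 00000000 abcdefgh 00000000 abcdefgh  */
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:	/* abcdefgh 00000000 abcdefgh 00000000  */
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:	/* abcdefgh abcdefgh abcdefgh abcdefgh  */
	return (imm & 0xff) | ((imm & 0xff) << 8)
	  | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  /* 1bcdefgh rotated right by COUNT (COUNT is 8..31 here, so a plain
     left shift of the low byte is equivalent to the rotation).  */
  return (0x80 | (imm & 0x7f)) << (32 - count);
}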
else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
&& (inst2 & 0x8000) == 0x0000)
unsigned int imm = ((bits (insn, 10, 10) << 11)
| (bits (inst2, 12, 14) << 8)
| bits (inst2, 0, 7));
regs[bits (inst2, 8, 11)]
= pv_add_constant (regs[bits (insn, 0, 3)], imm);
else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
&& (inst2 & 0x8000) == 0x0000)
unsigned int imm = ((bits (insn, 10, 10) << 11)
| (bits (inst2, 12, 14) << 8)
| bits (inst2, 0, 7));
regs[bits (inst2, 8, 11)]
= pv_add_constant (regs[bits (insn, 0, 3)],
- (CORE_ADDR) thumb_expand_immediate (imm));
else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
&& (inst2 & 0x8000) == 0x0000)
unsigned int imm = ((bits (insn, 10, 10) << 11)
| (bits (inst2, 12, 14) << 8)
| bits (inst2, 0, 7));
regs[bits (inst2, 8, 11)]
= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
unsigned int imm = ((bits (insn, 10, 10) << 11)
| (bits (inst2, 12, 14) << 8)
| bits (inst2, 0, 7));
regs[bits (inst2, 8, 11)]
= pv_constant (thumb_expand_immediate (imm));
else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
unsigned int imm
= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
regs[bits (inst2, 8, 11)] = pv_constant (imm);
else if (insn == 0xea5f /* mov.w Rd,Rm */
&& (inst2 & 0xf0f0) == 0)
int dst_reg = (inst2 & 0x0f00) >> 8;
int src_reg = inst2 & 0xf;
regs[dst_reg] = regs[src_reg];
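/* Sketch of the immediate-extraction macros used for movw/movt above
   and in arm_analyze_load_stack_chk_guard below. The Thumb-2 (_T) form
   gathers imm4:i:imm3:imm8 from the two halfwords; the ARM (_A) form
   gathers imm4:imm12. Assumed definitions, reconstructed from the
   encodings; the originals live elsewhere in this file.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  (((insn1 & 0x000f) << 12) /* imm4 */ \
   | ((insn1 & 0x0400) << 1) /* i */ \
   | ((insn2 & 0x7000) >> 4) /* imm3 */ \
   | (insn2 & 0x00ff)) /* imm8 */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  (((insn & 0x000f0000) >> 4) /* imm4 */ \
   | (insn & 0x00000fff)) /* imm12 */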
else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
/* Constant pool loads. */
unsigned int constant;
offset = bits (insn, 0, 11);
loc = start + 4 + offset;
loc = start + 4 - offset;
constant = read_memory_unsigned_integer (loc, 4, byte_order);
regs[bits (inst2, 12, 15)] = pv_constant (constant);
else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
/* Constant pool loads. */
unsigned int constant;
offset = bits (insn, 0, 7) << 2;
loc = start + 4 + offset;
loc = start + 4 - offset;
constant = read_memory_unsigned_integer (loc, 4, byte_order);
regs[bits (inst2, 12, 15)] = pv_constant (constant);
constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
regs[bits (inst2, 8, 11)] = pv_constant (constant);
else if (thumb2_instruction_changes_pc (insn, inst2))
/* Don't scan past anything that might change control flow. */
/* We don't know what this instruction is. The optimizer might
shove anything into the prologue, so we just skip what we don't
recognize. NOTE: Recognizing more safe-to-ignore instructions
here will improve support for optimized code. */
unrecognized_pc = start;
else if (thumb_instruction_changes_pc (insn))
/* Don't scan past anything that might change control flow. */
/* We don't know what this instruction is. The optimizer might
shove anything into the prologue, so we just skip what we don't
recognize. NOTE: Recognizing more safe-to-ignore instructions
here will improve support for optimized code. */
unrecognized_pc = start;
cache->saved_regs[i].addr = offset;
do_cleanups (back_to);
return unrecognized_pc;
/* Try to analyze the instructions starting from PC, which load symbol
__stack_chk_guard. Return the address of the instruction after loading this
symbol, set the destination register number in *DESTREG, and set the size of
the instructions that load the symbol in *OFFSET. Return 0 if instructions
are not recognized. */
arm_analyze_load_stack_chk_guard (CORE_ADDR pc, struct gdbarch *gdbarch,
unsigned int *destreg, int *offset)
enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
int is_thumb = arm_pc_is_thumb (pc);
unsigned int low, high, address;
unsigned short insn1
= read_memory_unsigned_integer (pc, 2, byte_order_for_code);
if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
*destreg = bits (insn1, 8, 10);
address = bits (insn1, 0, 7);
else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
unsigned short insn2
= read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
insn1
= read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
insn2
= read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
/* movt Rd, #const */
if ((insn1 & 0xfbc0) == 0xf2c0)
high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
*destreg = bits (insn2, 8, 11);
address = (high << 16 | low);
unsigned int insn
= read_memory_unsigned_integer (pc, 4, byte_order_for_code);
if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, #immed */
address = bits (insn, 0, 11);
*destreg = bits (insn, 12, 15);
else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
low = EXTRACT_MOVW_MOVT_IMM_A (insn);
insn
= read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
high = EXTRACT_MOVW_MOVT_IMM_A (insn);
*destreg = bits (insn, 12, 15);
address = (high << 16 | low);
/* Try to skip a sequence of instructions used for stack protector. If PC
points to the first instruction of this sequence, return the address of the
first instruction after this sequence, otherwise, return the original PC.

On arm, this sequence of instructions is mainly composed of three steps,
Step 1: load symbol __stack_chk_guard,
Step 2: load from the address of __stack_chk_guard,
Step 3: store it to somewhere else.

Usually, the instructions in steps 2 and 3 are the same across ARM
architectures. In step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
in step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
the instructions in step 1 vary across ARM architectures. On ARMv7,

movw Rn, #:lower16:__stack_chk_guard
movt Rn, #:upper16:__stack_chk_guard

.word __stack_chk_guard

Since ldr/str are very common instructions, we can't use them alone as the
'fingerprint' or 'signature' of the stack protector sequence. Here we choose
the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if
not stripped, as the 'fingerprint' of a stack protector code sequence. */
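/* For illustration, the full ARMv7 sequence the three steps describe
   (register choices are arbitrary; r7 stands in for whatever frame
   register the compiler picked):

     movw r3, #:lower16:__stack_chk_guard
     movt r3, #:upper16:__stack_chk_guard	@ Step 1: address of guard
     ldr  r3, [r3, #0]				@ Step 2: load guard value
     str  r3, [r7, #8]				@ Step 3: stash it in the frame  */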
arm_skip_stack_protector (CORE_ADDR pc, struct gdbarch *gdbarch)
enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
unsigned int address, basereg;
struct minimal_symbol *stack_chk_guard;
int is_thumb = arm_pc_is_thumb (pc);
/* Try to parse the instructions in Step 1. */
addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
&basereg, &offset);
stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
/* If the symbol name doesn't start with '__stack_chk_guard', this
instruction sequence is not for the stack protector. If the symbol has
been stripped, we conservatively assume the sequence is for the stack
protector. */
&& strncmp (SYMBOL_LINKAGE_NAME (stack_chk_guard), "__stack_chk_guard",
strlen ("__stack_chk_guard")) != 0)
unsigned int destreg;
unsigned short insn
= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
/* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
if ((insn & 0xf800) != 0x6800)
if (bits (insn, 3, 5) != basereg)
destreg = bits (insn, 0, 2);
insn = read_memory_unsigned_integer (pc + offset + 2, 2,
byte_order_for_code);
/* Step 3: str Rd, [Rn, #immed], encoding T1. */
if ((insn & 0xf800) != 0x6000)
if (destreg != bits (insn, 0, 2))
unsigned int destreg;
unsigned int insn
= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
/* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
if ((insn & 0x0e500000) != 0x04100000)
if (bits (insn, 16, 19) != basereg)
destreg = bits (insn, 12, 15);
/* Step 3: str Rd, [Rn, #immed], encoding A1. */
insn = read_memory_unsigned_integer (pc + offset + 4,
4, byte_order_for_code);
if ((insn & 0x0e500000) != 0x04000000)
if (bits (insn, 12, 15) != destreg)
/* The total size of the two ldr/str instructions is 4 on Thumb-2, while 8
on ARM mode. */
return pc + offset + 4;
return pc + offset + 8;
/* Advance the PC across any function entry prologue instructions to
reach some "real" code. */
default_frame_sniffer
/* Maintain a list of ARM exception table entries per objfile, similar to the
list of mapping symbols. We only cache entries for standard ARM-defined
personality routines; the cache will contain only the frame unwinding
instructions associated with the entry (not the descriptors). */
static const struct objfile_data *arm_exidx_data_key;
struct arm_exidx_entry
typedef struct arm_exidx_entry arm_exidx_entry_s;
DEF_VEC_O(arm_exidx_entry_s);
struct arm_exidx_data
VEC(arm_exidx_entry_s) **section_maps;
arm_exidx_data_free (struct objfile *objfile, void *arg)
struct arm_exidx_data *data = arg;
for (i = 0; i < objfile->obfd->section_count; i++)
VEC_free (arm_exidx_entry_s, data->section_maps[i]);
arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
const struct arm_exidx_entry *rhs)
return lhs->addr < rhs->addr;
static struct obj_section *
arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
struct obj_section *osect;
ALL_OBJFILE_OSECTIONS (objfile, osect)
if (bfd_get_section_flags (objfile->obfd,
osect->the_bfd_section) & SEC_ALLOC)
bfd_vma start, size;
start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
size = bfd_get_section_size (osect->the_bfd_section);
if (start <= vma && vma < start + size)
/* Parse contents of exception table and exception index sections
of OBJFILE, and fill in the exception table entry cache.

For each entry that refers to a standard ARM-defined personality
routine, extract the frame unwinding instructions (from either
the index or the table section). The unwinding instructions
are normalized by:
- extracting them from the rest of the table data
- converting to host endianness
- appending the implicit 0xb0 ("Finish") code

The extracted and normalized instructions are stored for later
retrieval by the arm_find_exidx_entry routine. */
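/* For reference, the raw 8-byte .ARM.exidx record format decoded below
   (per the Exception Handling ABI; the struct and its field names are
   illustrative only -- the code reads the two words directly):  */
struct raw_exidx_record
{
  uint32_t fn_offset;	/* prel31 place-relative offset to function start.  */
  uint32_t content;	/* 1 = EXIDX_CANTUNWIND, bit 31 set = short-form
			   entry inlined here, else a prel31 offset into
			   .ARM.extab.  */
};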
arm_exidx_new_objfile (struct objfile *objfile)
struct cleanup *cleanups = make_cleanup (null_cleanup, NULL);
struct arm_exidx_data *data;
asection *exidx, *extab;
bfd_vma exidx_vma = 0, extab_vma = 0;
bfd_size_type exidx_size = 0, extab_size = 0;
gdb_byte *exidx_data = NULL, *extab_data = NULL;
/* If we've already touched this file, do nothing. */
if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
/* Read contents of exception table and index. */
exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
exidx_vma = bfd_section_vma (objfile->obfd, exidx);
exidx_size = bfd_get_section_size (exidx);
exidx_data = xmalloc (exidx_size);
make_cleanup (xfree, exidx_data);
if (!bfd_get_section_contents (objfile->obfd, exidx,
exidx_data, 0, exidx_size))
do_cleanups (cleanups);
extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
extab_vma = bfd_section_vma (objfile->obfd, extab);
extab_size = bfd_get_section_size (extab);
extab_data = xmalloc (extab_size);
make_cleanup (xfree, extab_data);
if (!bfd_get_section_contents (objfile->obfd, extab,
extab_data, 0, extab_size))
do_cleanups (cleanups);
/* Allocate exception table data structure. */
data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
set_objfile_data (objfile, arm_exidx_data_key, data);
data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
objfile->obfd->section_count,
VEC(arm_exidx_entry_s) *);
/* Fill in exception table. */
for (i = 0; i < exidx_size / 8; i++)
struct arm_exidx_entry new_exidx_entry;
bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
bfd_vma addr = 0, word = 0;
int n_bytes = 0, n_words = 0;
struct obj_section *sec;
gdb_byte *entry = NULL;
/* Extract address of start of function. */
idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
idx += exidx_vma + i * 8;
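/* The XOR/subtract pair above sign-extends a 31-bit place-relative
   (prel31) offset to the full width of bfd_vma. An equivalent helper,
   written out for clarity (hypothetical; the code above inlines it):  */
static bfd_vma
arm_prel31_to_offset (bfd_vma prel31)
{
  /* Keep bits 0..30, then propagate bit 30 as the sign bit.  */
  return ((prel31 & 0x7fffffff) ^ 0x40000000) - 0x40000000;
}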
/* Find section containing function and compute section offset. */
sec = arm_obj_section_from_vma (objfile, idx);
idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
/* Determine address of exception table entry. */
/* EXIDX_CANTUNWIND -- no exception table entry present. */
else if ((val & 0xff000000) == 0x80000000)
/* Exception table entry embedded in .ARM.exidx
-- must be short form. */
else if (!(val & 0x80000000))
/* Exception table entry in .ARM.extab. */
addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
addr += exidx_vma + i * 8 + 4;
if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
word = bfd_h_get_32 (objfile->obfd,
extab_data + addr - extab_vma);
if ((word & 0xff000000) == 0x80000000)
else if ((word & 0xff000000) == 0x81000000
|| (word & 0xff000000) == 0x82000000)
n_words = ((word >> 16) & 0xff);
else if (!(word & 0x80000000))
struct obj_section *pers_sec;
int gnu_personality = 0;
/* Custom personality routine. */
pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
/* Check whether we've got one of the variants of the
GNU personality routines. */
pers_sec = arm_obj_section_from_vma (objfile, pers);
static const char *personality[] =
"__gcc_personality_v0",
"__gxx_personality_v0",
"__gcj_personality_v0",
"__gnu_objc_personality_v0",
CORE_ADDR pc = pers + obj_section_offset (pers_sec);
for (k = 0; personality[k]; k++)
if (lookup_minimal_symbol_by_pc_name
(pc, personality[k], objfile))
gnu_personality = 1;
/* If so, the next word contains a word count in the high
byte, followed by the same unwind instructions as the
pre-defined forms. */
&& addr + 4 <= extab_vma + extab_size)
word = bfd_h_get_32 (objfile->obfd,
extab_data + addr - extab_vma);
n_words = ((word >> 24) & 0xff);
/* Sanity check address. */
if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
n_words = n_bytes = 0;
/* The unwind instructions reside in WORD (only the N_BYTES least
significant bytes are valid), followed by N_WORDS words in the
extab section starting at ADDR. */
if (n_bytes || n_words)
gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
n_bytes + n_words * 4 + 1);
*p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
word = bfd_h_get_32 (objfile->obfd,
extab_data + addr - extab_vma);
*p++ = (gdb_byte) ((word >> 24) & 0xff);
*p++ = (gdb_byte) ((word >> 16) & 0xff);
*p++ = (gdb_byte) ((word >> 8) & 0xff);
*p++ = (gdb_byte) (word & 0xff);
/* Implied "Finish" to terminate the list. */
/* Push entry onto vector. They are guaranteed to always
appear in order of increasing addresses. */
new_exidx_entry.addr = idx;
new_exidx_entry.entry = entry;
VEC_safe_push (arm_exidx_entry_s,
data->section_maps[sec->the_bfd_section->index],
do_cleanups (cleanups);
/* Search for the exception table entry covering MEMADDR. If one is found,
return a pointer to its data. Otherwise, return 0. If START is non-NULL,
set *START to the start of the region covered by this entry. */
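/* A hypothetical caller, to make the contract above concrete:

     CORE_ADDR start;
     gdb_byte *entry = arm_find_exidx_entry (memaddr, &start);

     if (entry != NULL)
       ... ENTRY points at the normalized unwind opcodes whose covered
	   region begins at START and contains MEMADDR ...  */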
arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
struct obj_section *sec;
sec = find_pc_section (memaddr);
struct arm_exidx_data *data;
VEC(arm_exidx_entry_s) *map;
struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
data = objfile_data (sec->objfile, arm_exidx_data_key);
map = data->section_maps[sec->the_bfd_section->index];
if (!VEC_empty (arm_exidx_entry_s, map))
struct arm_exidx_entry *map_sym;
idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
arm_compare_exidx_entries);
/* VEC_lower_bound finds the earliest ordered insertion
point. If the following symbol starts at this exact
address, we use that; otherwise, the preceding
exception table entry covers this address. */
if (idx < VEC_length (arm_exidx_entry_s, map))
map_sym = VEC_index (arm_exidx_entry_s, map, idx);
if (map_sym->addr == map_key.addr)
*start = map_sym->addr + obj_section_addr (sec);
return map_sym->entry;
map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
*start = map_sym->addr + obj_section_addr (sec);
return map_sym->entry;
/* Given the current frame THIS_FRAME, and its associated frame unwinding
instruction list from the ARM exception table entry ENTRY, allocate and
return a prologue cache structure describing how to unwind this frame.

Return NULL if the unwinding instruction list contains a "spare",
"reserved" or "refuse to unwind" instruction as defined in section
"9.3 Frame unwinding instructions" of the "Exception Handling ABI
for the ARM Architecture" document. */
static struct arm_prologue_cache *
arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
struct arm_prologue_cache *cache;
cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
/* Whenever we reload SP, we actually have to retrieve its
actual value in the current frame. */
if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
vsp = get_frame_register_unsigned (this_frame, reg);
CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
vsp = get_frame_memory_unsigned (this_frame, addr, 4);
/* Decode next unwind instruction. */
if ((insn & 0xc0) == 0)
int offset = insn & 0x3f;
vsp += (offset << 2) + 4;
else if ((insn & 0xc0) == 0x40)
int offset = insn & 0x3f;
vsp -= (offset << 2) + 4;
else if ((insn & 0xf0) == 0x80)
int mask = ((insn & 0xf) << 8) | *entry++;
/* The special case of an all-zero mask identifies
"Refuse to unwind". We return NULL to fall back
to the prologue analyzer. */
/* Pop registers r4..r15 under mask. */
for (i = 0; i < 12; i++)
if (mask & (1 << i))
cache->saved_regs[4 + i].addr = vsp;
/* Special-case popping SP -- we need to reload vsp. */
if (mask & (1 << (ARM_SP_REGNUM - 4)))
else if ((insn & 0xf0) == 0x90)
int reg = insn & 0xf;
/* Reserved cases. */
if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
/* Set SP from another register and mark VSP for reload. */
cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
else if ((insn & 0xf0) == 0xa0)
int count = insn & 0x7;
int pop_lr = (insn & 0x8) != 0;
/* Pop r4..r[4+count]. */
for (i = 0; i <= count; i++)
cache->saved_regs[4 + i].addr = vsp;
/* If indicated by flag, pop LR as well. */
cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
else if (insn == 0xb0)
/* We could only have updated PC by popping into it; if so, it
will show up as address. Otherwise, copy LR into PC. */
if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
cache->saved_regs[ARM_PC_REGNUM]
= cache->saved_regs[ARM_LR_REGNUM];
else if (insn == 0xb1)
int mask = *entry++;
/* All-zero mask and mask >= 16 is "spare". */
if (mask == 0 || mask >= 16)
/* Pop r0..r3 under mask. */
for (i = 0; i < 4; i++)
if (mask & (1 << i))
cache->saved_regs[i].addr = vsp;
else if (insn == 0xb2)
ULONGEST offset = 0;
offset |= (*entry & 0x7f) << shift;
while (*entry++ & 0x80);
vsp += 0x204 + (offset << 2);
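/* The fragment above elides the loop header. Spelled out, the ULEB128
   decode it implies reads as follows (SHIFT starts at zero and grows by
   seven per continuation byte; a sketch matching the surrounding names):

     ULONGEST offset = 0;
     unsigned int shift = 0;

     do
       {
	 offset |= (ULONGEST) (*entry & 0x7f) << shift;
	 shift += 7;
       }
     while (*entry++ & 0x80);

     vsp += 0x204 + (offset << 2);  */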
else if (insn == 0xb3)
int start = *entry >> 4;
int count = (*entry++) & 0xf;
/* Only registers D0..D15 are valid here. */
if (start + count >= 16)
/* Pop VFP double-precision registers D[start]..D[start+count]. */
for (i = 0; i <= count; i++)
cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
/* Add an extra 4 bytes for FSTMFDX-style stack. */
else if ((insn & 0xf8) == 0xb8)
int count = insn & 0x7;
/* Pop VFP double-precision registers D[8]..D[8+count]. */
for (i = 0; i <= count; i++)
cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
/* Add an extra 4 bytes for FSTMFDX-style stack. */
else if (insn == 0xc6)
int start = *entry >> 4;
int count = (*entry++) & 0xf;
/* Only registers WR0..WR15 are valid. */
if (start + count >= 16)
/* Pop iwmmx registers WR[start]..WR[start+count]. */
for (i = 0; i <= count; i++)
cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
else if (insn == 0xc7)
int mask = *entry++;
/* All-zero mask and mask >= 16 is "spare". */
if (mask == 0 || mask >= 16)
/* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
for (i = 0; i < 4; i++)
if (mask & (1 << i))
cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
else if ((insn & 0xf8) == 0xc0)
int count = insn & 0x7;
/* Pop iwmmx registers WR[10]..WR[10+count]. */
for (i = 0; i <= count; i++)
cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2680
else if (insn == 0xc8)
2682
int start = *entry >> 4;
2683
int count = (*entry++) & 0xf;
2686
/* Only registers D0..D31 are valid. */
2687
if (start + count >= 16)
2690
/* Pop VFP double-precision registers
2691
D[16+start]..D[16+start+count]. */
2692
for (i = 0; i <= count; i++)
2694
cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2698
else if (insn == 0xc9)
2700
int start = *entry >> 4;
2701
int count = (*entry++) & 0xf;
2704
/* Pop VFP double-precision registers D[start]..D[start+count]. */
2705
for (i = 0; i <= count; i++)
2707
cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2711
else if ((insn & 0xf8) == 0xd0)
2713
int count = insn & 0x7;
2716
/* Pop VFP double-precision registers D[8]..D[8+count]. */
2717
for (i = 0; i <= count; i++)
2719
cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2725
/* Everything else is "spare". */
/* If we restore SP from a register, assume this was the frame register.
Otherwise just fall back to SP as frame register. */
if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
cache->framereg = ARM_SP_REGNUM;
/* Determine offset to previous frame. */
cache->framesize
= vsp - get_frame_register_unsigned (this_frame, cache->framereg);
/* We already got the previous SP. */
cache->prev_sp = vsp;
/* Unwinding via ARM exception table entries. Note that the sniffer
already computes a filled-in prologue cache, which is then used
with the same arm_prologue_this_id and arm_prologue_prev_register
routines also used for prologue-parsing based unwinding. */
arm_exidx_unwind_sniffer (const struct frame_unwind *self,
struct frame_info *this_frame,
void **this_prologue_cache)
struct gdbarch *gdbarch = get_frame_arch (this_frame);
enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
CORE_ADDR addr_in_block, exidx_region, func_start;
struct arm_prologue_cache *cache;
/* See if we have an ARM exception table entry covering this address. */
addr_in_block = get_frame_address_in_block (this_frame);
entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
/* The ARM exception table does not describe unwind information
for arbitrary PC values, but is guaranteed to be correct only
at call sites. We have to decide here whether we want to use
ARM exception table information for this frame, or fall back
to using prologue parsing. (Note that if we have DWARF CFI,
this sniffer isn't even called -- CFI is always preferred.)

Before we make this decision, however, we check whether we
actually have *symbol* information for the current frame.
If not, prologue parsing would not work anyway, so we might
as well use the exception table and hope for the best. */
if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
/* If the next frame is "normal", we are at a call site in this
frame, so exception information is guaranteed to be valid. */
if (get_next_frame (this_frame)
&& get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
/* We also assume exception information is valid if we're currently
blocked in a system call. The system library is supposed to
ensure this, so that e.g. pthread cancellation works. */
if (arm_frame_is_thumb (this_frame))
if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
byte_order_for_code, &insn)
&& (insn & 0xff00) == 0xdf00 /* svc */)
if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
byte_order_for_code, &insn)
&& (insn & 0x0f000000) == 0x0f000000 /* svc */)
/* Bail out if we don't know that exception information is valid. */
/* The ARM exception index does not mark the *end* of the region
covered by the entry, and some functions will not have any entry.
To correctly recognize the end of the covered region, the linker
should have inserted dummy records with a CANTUNWIND marker.

Unfortunately, current versions of GNU ld do not reliably do
this, and thus we may have found an incorrect entry above.
As a (temporary) sanity check, we only use the entry if it
lies *within* the bounds of the function. Note that this check
might reject perfectly valid entries that just happen to cover
multiple functions; therefore this check ought to be removed
once the linker is fixed. */
if (func_start > exidx_region)
/* Decode the list of unwinding instructions into a prologue cache.
Note that this may fail due to e.g. a "refuse to unwind" code. */
cache = arm_exidx_fill_cache (this_frame, entry);
*this_prologue_cache = cache;
struct frame_unwind arm_exidx_unwind = {
arm_prologue_this_id,
arm_prologue_prev_register,
arm_exidx_unwind_sniffer
static struct arm_prologue_cache *
arm_make_stub_cache (struct frame_info *this_frame)
/* Return true if we are in the function's epilogue, i.e. after the
instruction that destroyed the function's stack frame. */
thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
unsigned int insn, insn2;
int found_return = 0, found_stack_adjust = 0;
CORE_ADDR func_start, func_end;
if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
/* The epilogue is a sequence of instructions along the following lines:

- add stack frame size to SP or FP
- [if frame pointer used] restore SP from FP
- restore registers from SP [may include PC]
- a return-type instruction [if PC wasn't already restored]

In a first pass, we scan forward from the current PC and verify the
instructions we find as compatible with this sequence, ending in a
return instruction.

However, this is not sufficient to distinguish indirect function calls
within a function from indirect tail calls in the epilogue in some cases.
Therefore, if we didn't already find any SP-changing instruction during
forward scan, we add a backward scanning heuristic to ensure we actually
are in the epilogue. */
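/* A typical Thumb epilogue of the shape described above, for
   concreteness (illustrative register choices):

     add sp, #16		@ deallocate locals
     pop {r4, r5, r6, pc}	@ restore callee-saved regs and return  */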
while (scan_pc < func_end && !found_return)
if (target_read_memory (scan_pc, buf, 2))
insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
else if (insn == 0x46f7) /* mov pc, lr */
else if (insn == 0x46bd) /* mov sp, r7 */
found_stack_adjust = 1;
else if ((insn & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
found_stack_adjust = 1;
else if ((insn & 0xfe00) == 0xbc00) /* pop <registers> */
found_stack_adjust = 1;
if (insn & 0x0100) /* <registers> include PC. */
else if ((insn & 0xe000) == 0xe000) /* 32-bit Thumb-2 instruction */
if (target_read_memory (scan_pc, buf, 2))
insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
found_stack_adjust = 1;
if (insn2 & 0x8000) /* <registers> include PC. */
else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
&& (insn2 & 0x0fff) == 0x0b04)
found_stack_adjust = 1;
if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
&& (insn2 & 0x0e00) == 0x0a00)
found_stack_adjust = 1;
/* Since any instruction in the epilogue sequence, with the possible
exception of return itself, updates the stack pointer, we need to
scan backwards for at most one instruction. Try either a 16-bit or
a 32-bit instruction. This is just a heuristic, so we do not worry
too much about false positives. */
if (!found_stack_adjust)
if (pc - 4 < func_start)
if (target_read_memory (pc - 4, buf, 4))
insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
if (insn2 == 0x46bd) /* mov sp, r7 */
found_stack_adjust = 1;
else if ((insn2 & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
found_stack_adjust = 1;
else if ((insn2 & 0xff00) == 0xbc00) /* pop <registers> without PC */
found_stack_adjust = 1;
else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
found_stack_adjust = 1;
else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
&& (insn2 & 0x0fff) == 0x0b04)
found_stack_adjust = 1;
else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
&& (insn2 & 0x0e00) == 0x0a00)
found_stack_adjust = 1;
return found_stack_adjust;
/* Return true if we are in the function's epilogue, i.e. after the
instruction that destroyed the function's stack frame. */
arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
int found_return, found_stack_adjust;
CORE_ADDR func_start, func_end;
if (arm_pc_is_thumb (pc))
return thumb_in_function_epilogue_p (gdbarch, pc);
if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
/* We are in the epilogue if the previous instruction was a stack
adjustment and the next instruction is a possible return (bx, mov
pc, or pop). We could have to scan backwards to find the stack
adjustment, or forwards to find the return, but this is a decent
approximation. First scan forwards. */
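/* For concreteness, an ARM-mode epilogue matching this pattern
   (illustrative register choices):

     add   sp, sp, #24			@ stack adjustment (previous insn)
     ldmia sp!, {r4, r5, fp, pc}	@ POP including PC: the return  */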
insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
if (bits (insn, 28, 31) != INST_NV)
if ((insn & 0x0ffffff0) == 0x012fff10)
else if ((insn & 0x0ffffff0) == 0x01a0f000)
else if ((insn & 0x0fff0000) == 0x08bd0000
&& (insn & 0x0000c000) != 0)
/* POP (LDMIA), including PC or LR. */
/* Scan backwards. This is just a heuristic, so do not worry about
false positives from mode changes. */
if (pc < func_start + 4)
insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
if (bits (insn, 28, 31) != INST_NV)
if ((insn & 0x0df0f000) == 0x0080d000)
/* ADD SP (register or immediate). */
found_stack_adjust = 1;
else if ((insn & 0x0df0f000) == 0x0040d000)
/* SUB SP (register or immediate). */
found_stack_adjust = 1;
else if ((insn & 0x0ffffff0) == 0x01a0d000)
else if ((insn & 0x0fff0000) == 0x08bd0000)
found_stack_adjust = 1;
if (found_stack_adjust)
/* When arguments must be pushed onto the stack, they go on in reverse
order. The code below implements a FILO (stack) to do this. */