1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2025 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20
21 #include <ctype.h>
22
23 #include "extract-store-integer.h"
24 #include "frame.h"
25 #include "language.h"
26 #include "inferior.h"
27 #include "infrun.h"
28 #include "cli/cli-cmds.h"
29 #include "gdbcore.h"
30 #include "dis-asm.h"
31 #include "disasm.h"
32 #include "regcache.h"
33 #include "reggroups.h"
34 #include "target-float.h"
35 #include "value.h"
36 #include "arch-utils.h"
37 #include "osabi.h"
38 #include "frame-unwind.h"
39 #include "frame-base.h"
40 #include "trad-frame.h"
41 #include "objfiles.h"
42 #include "dwarf2.h"
43 #include "dwarf2/frame.h"
44 #include "gdbtypes.h"
45 #include "prologue-value.h"
46 #include "remote.h"
47 #include "target-descriptions.h"
48 #include "user-regs.h"
49 #include "observable.h"
50 #include "count-one-bits.h"
51
52 #include "arch/arm.h"
53 #include "arch/arm-get-next-pcs.h"
54 #include "arm-tdep.h"
55 #include "sim/sim-arm.h"
56
57 #include "elf-bfd.h"
58 #include "coff/internal.h"
59 #include "elf/arm.h"
60
61 #include "record.h"
62 #include "record-full.h"
63 #include <algorithm>
64
65 #include "producer.h"
66
67 #if GDB_SELF_TEST
68 #include "gdbsupport/selftest.h"
69 #endif
70
71 static bool arm_debug;
72
73 /* Print an "arm" debug statement. */
74
75 #define arm_debug_printf(fmt, ...) \
76 debug_prefixed_printf_cond (arm_debug, "arm", fmt, ##__VA_ARGS__)
77
 78 /* Macros for setting and testing a bit in a minimal symbol that marks
 79    it as a Thumb function.  The MSB of the minimal symbol's "info" field
80 is used for this purpose.
81
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
84
85 #define MSYMBOL_SET_SPECIAL(msym) \
86 (msym)->set_target_flag_1 (true)
87
88 #define MSYMBOL_IS_SPECIAL(msym) \
89 (msym)->target_flag_1 ()
90
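/* An entry in the mapping symbol table: the symbol's address, relative to
   the start of the section that contains it, and its type character ('a',
   'd' or 't', for ARM code, data and Thumb code respectively).  */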
91 struct arm_mapping_symbol
92 {
93 CORE_ADDR value;
94 char type;
95
96 bool operator< (const arm_mapping_symbol &other) const
97 { return this->value < other.value; }
98 };
99
100 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
101
102 struct arm_per_bfd
103 {
104 explicit arm_per_bfd (size_t num_sections)
105 : section_maps (new arm_mapping_symbol_vec[num_sections]),
106 section_maps_sorted (new bool[num_sections] ())
107 {}
108
109 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
110
111 /* Information about mapping symbols ($a, $d, $t) in the objfile.
112
 113      The format is an array of vectors of arm_mapping_symbols: there is one
 114      vector for each section of the objfile (the array is indexed by BFD
 115      section index).
116
117 For each section, the vector of arm_mapping_symbol is sorted by
118 symbol value (address). */
119 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
120
 121   /* For each corresponding element of section_maps above, whether that
 122      vector has been sorted.  */
123 std::unique_ptr<bool[]> section_maps_sorted;
124 };
125
126 /* Per-bfd data used for mapping symbols. */
127 static const registry<bfd>::key<arm_per_bfd> arm_bfd_data_key;
128
129 /* The list of available "set arm ..." and "show arm ..." commands. */
130 static struct cmd_list_element *setarmcmdlist = NULL;
131 static struct cmd_list_element *showarmcmdlist = NULL;
132
133 /* The type of floating-point to use. Keep this in sync with enum
134 arm_float_model, and the help string in _initialize_arm_tdep. */
135 static const char *const fp_model_strings[] =
136 {
137 "auto",
138 "softfpa",
139 "fpa",
140 "softvfp",
141 "vfp",
142 NULL
143 };
144
145 /* A variable that can be configured by the user. */
146 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
147 static const char *current_fp_model = "auto";
148
149 /* The ABI to use. Keep this in sync with arm_abi_kind. */
150 static const char *const arm_abi_strings[] =
151 {
152 "auto",
153 "APCS",
154 "AAPCS",
155 NULL
156 };
157
158 /* A variable that can be configured by the user. */
159 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
160 static const char *arm_abi_string = "auto";
161
162 /* The execution mode to assume. */
163 static const char *const arm_mode_strings[] =
164 {
165 "auto",
166 "arm",
167 "thumb",
168 NULL
169 };
170
171 static const char *arm_fallback_mode_string = "auto";
172 static const char *arm_force_mode_string = "auto";
173
174 /* The standard register names, and all the valid aliases for them. Note
175 that `fp', `sp' and `pc' are not added in this alias list, because they
176 have been added as builtin user registers in
177 std-regs.c:_initialize_frame_reg. */
178 static const struct
179 {
180 const char *name;
181 int regnum;
182 } arm_register_aliases[] = {
183 /* Basic register numbers. */
184 { "r0", 0 },
185 { "r1", 1 },
186 { "r2", 2 },
187 { "r3", 3 },
188 { "r4", 4 },
189 { "r5", 5 },
190 { "r6", 6 },
191 { "r7", 7 },
192 { "r8", 8 },
193 { "r9", 9 },
194 { "r10", 10 },
195 { "r11", 11 },
196 { "r12", 12 },
197 { "r13", 13 },
198 { "r14", 14 },
199 { "r15", 15 },
200 /* Synonyms (argument and variable registers). */
201 { "a1", 0 },
202 { "a2", 1 },
203 { "a3", 2 },
204 { "a4", 3 },
205 { "v1", 4 },
206 { "v2", 5 },
207 { "v3", 6 },
208 { "v4", 7 },
209 { "v5", 8 },
210 { "v6", 9 },
211 { "v7", 10 },
212 { "v8", 11 },
213 /* Other platform-specific names for r9. */
214 { "sb", 9 },
215 { "tr", 9 },
216 /* Special names. */
217 { "ip", 12 },
218 { "lr", 14 },
219 /* Names used by GCC (not listed in the ARM EABI). */
220 { "sl", 10 },
221 /* A special name from the older ATPCS. */
222 { "wr", 7 },
223 };
224
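/* The default register names, indexed by GDB's internal register numbers
   (shown in the trailing comments); the core registers r0-r12, sp, lr and pc
   are followed by the FPA registers f0-f7, the FPA status register fps, and
   cpsr.  */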
225 static const char *const arm_register_names[] =
226 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
227 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
228 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
229 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
230 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
231 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
232 "fps", "cpsr" }; /* 24 25 */
233
234 /* Holds the current set of options to be passed to the disassembler. */
235 static std::string arm_disassembler_options;
236
237 /* Valid register name styles. */
238 static const char **valid_disassembly_styles;
239
240 /* Disassembly style to use. Default to "std" register names. */
241 static const char *disassembly_style;
242
243 /* All possible arm target descriptors. */
244 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID][2];
245 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
246
247 /* This is used to keep the bfd arch_info in sync with the disassembly
248 style. */
249 static void set_disassembly_style_sfunc (const char *, int,
250 struct cmd_list_element *);
251 static void show_disassembly_style_sfunc (struct ui_file *, int,
252 struct cmd_list_element *,
253 const char *);
254
255 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
256 readable_regcache *regcache,
257 int regnum, gdb_byte *buf);
258 static void arm_neon_quad_write (struct gdbarch *gdbarch,
259 struct regcache *regcache,
260 int regnum, const gdb_byte *buf);
261
262 static CORE_ADDR
263 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
264
265
266 /* get_next_pcs operations. */
267 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
268 arm_get_next_pcs_read_memory_unsigned_integer,
269 arm_get_next_pcs_syscall_next_pc,
270 arm_get_next_pcs_addr_bits_remove,
271 arm_get_next_pcs_is_thumb,
272 NULL,
273 };
274
275 struct arm_prologue_cache
276 {
277 /* The stack pointer at the time this frame was created; i.e. the
278 caller's stack pointer when this function was called. It is used
279 to identify this frame. */
280 CORE_ADDR sp;
281
282 /* Additional stack pointers used by M-profile with Security extension. */
283 /* Use msp_s / psp_s to hold the values of msp / psp when there is
284 no Security extension. */
285 CORE_ADDR msp_s;
286 CORE_ADDR msp_ns;
287 CORE_ADDR psp_s;
288 CORE_ADDR psp_ns;
289
290 /* Active stack pointer. */
291 int active_sp_regnum;
292 int active_msp_regnum;
293 int active_psp_regnum;
294
295 /* The frame base for this frame is just prev_sp - frame size.
296 FRAMESIZE is the distance from the frame pointer to the
297 initial stack pointer. */
298
299 int framesize;
300
301 /* The register used to hold the frame pointer for this frame. */
302 int framereg;
303
304 /* True if the return address is signed, false otherwise. */
305 std::optional<bool> ra_signed_state;
306
307 /* Saved register offsets. */
308 trad_frame_saved_reg *saved_regs;
309
310 arm_prologue_cache() = default;
311 };
312
313
314 /* Reconstruct T bit in program status register from LR value. */
315
316 static inline ULONGEST
317 reconstruct_t_bit(struct gdbarch *gdbarch, CORE_ADDR lr, ULONGEST psr)
318 {
319 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
320 if (IS_THUMB_ADDR (lr))
321 psr |= t_bit;
322 else
323 psr &= ~t_bit;
324
325 return psr;
326 }
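/* For example (assuming the A/R-profile CPSR layout, where the T bit is
   bit 5, 0x20): an LR value of 0x8001 has its low bit set, so the returned
   PSR has the T bit set; an LR value of 0x8000 clears it.  */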
327
328 /* Initialize CACHE fields for which zero is not adequate (CACHE is
329 expected to have been ZALLOC'ed before calling this function). */
330
331 static void
332 arm_cache_init (struct arm_prologue_cache *cache, struct gdbarch *gdbarch)
333 {
334 cache->active_sp_regnum = ARM_SP_REGNUM;
335
336 cache->saved_regs = trad_frame_alloc_saved_regs (gdbarch);
337 }
338
339 /* Similar to the previous function, but extracts GDBARCH from FRAME. */
340
341 static void
342 arm_cache_init (struct arm_prologue_cache *cache, const frame_info_ptr &frame)
343 {
344 struct gdbarch *gdbarch = get_frame_arch (frame);
345 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
346
347 arm_cache_init (cache, gdbarch);
348 cache->sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
349
350 if (tdep->have_sec_ext)
351 {
352 const CORE_ADDR msp_val
353 = get_frame_register_unsigned (frame, tdep->m_profile_msp_regnum);
354 const CORE_ADDR psp_val
355 = get_frame_register_unsigned (frame, tdep->m_profile_psp_regnum);
356
357 cache->msp_s
358 = get_frame_register_unsigned (frame, tdep->m_profile_msp_s_regnum);
359 cache->msp_ns
360 = get_frame_register_unsigned (frame, tdep->m_profile_msp_ns_regnum);
361 cache->psp_s
362 = get_frame_register_unsigned (frame, tdep->m_profile_psp_s_regnum);
363 cache->psp_ns
364 = get_frame_register_unsigned (frame, tdep->m_profile_psp_ns_regnum);
365
 366       /* Identify what msp is an alias for (msp_s or msp_ns).  */
367 if (msp_val == cache->msp_s)
368 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
369 else if (msp_val == cache->msp_ns)
370 cache->active_msp_regnum = tdep->m_profile_msp_ns_regnum;
371 else
372 {
373 warning (_("Invalid state, unable to determine msp alias, assuming "
374 "msp_s."));
375 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
376 }
377
 378       /* Identify what psp is an alias for (psp_s or psp_ns).  */
379 if (psp_val == cache->psp_s)
380 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
381 else if (psp_val == cache->psp_ns)
382 cache->active_psp_regnum = tdep->m_profile_psp_ns_regnum;
383 else
384 {
385 warning (_("Invalid state, unable to determine psp alias, assuming "
386 "psp_s."));
387 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
388 }
389
 390       /* Identify what sp is an alias for (msp_s, msp_ns, psp_s or psp_ns).  */
391 if (msp_val == cache->sp)
392 cache->active_sp_regnum = cache->active_msp_regnum;
393 else if (psp_val == cache->sp)
394 cache->active_sp_regnum = cache->active_psp_regnum;
395 else
396 {
397 warning (_("Invalid state, unable to determine sp alias, assuming "
398 "msp."));
399 cache->active_sp_regnum = cache->active_msp_regnum;
400 }
401 }
402 else if (tdep->is_m)
403 {
404 cache->msp_s
405 = get_frame_register_unsigned (frame, tdep->m_profile_msp_regnum);
406 cache->psp_s
407 = get_frame_register_unsigned (frame, tdep->m_profile_psp_regnum);
408
 409       /* Identify what sp is an alias for (msp or psp).  */
410 if (cache->msp_s == cache->sp)
411 cache->active_sp_regnum = tdep->m_profile_msp_regnum;
412 else if (cache->psp_s == cache->sp)
413 cache->active_sp_regnum = tdep->m_profile_psp_regnum;
414 else
415 {
416 warning (_("Invalid state, unable to determine sp alias, assuming "
417 "msp."));
418 cache->active_sp_regnum = tdep->m_profile_msp_regnum;
419 }
420 }
421 else
422 {
423 cache->msp_s
424 = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
425
426 cache->active_sp_regnum = ARM_SP_REGNUM;
427 }
428 }
429
430 /* Return the requested stack pointer value (in REGNUM), taking into
431 account whether we have a Security extension or an M-profile
432 CPU. */
433
434 static CORE_ADDR
435 arm_cache_get_sp_register (struct arm_prologue_cache *cache,
436 arm_gdbarch_tdep *tdep, int regnum)
437 {
438 if (tdep->have_sec_ext)
439 {
440 if (regnum == tdep->m_profile_msp_s_regnum)
441 return cache->msp_s;
442 if (regnum == tdep->m_profile_msp_ns_regnum)
443 return cache->msp_ns;
444 if (regnum == tdep->m_profile_psp_s_regnum)
445 return cache->psp_s;
446 if (regnum == tdep->m_profile_psp_ns_regnum)
447 return cache->psp_ns;
448 if (regnum == tdep->m_profile_msp_regnum)
449 return arm_cache_get_sp_register (cache, tdep, cache->active_msp_regnum);
450 if (regnum == tdep->m_profile_psp_regnum)
451 return arm_cache_get_sp_register (cache, tdep, cache->active_psp_regnum);
452 if (regnum == ARM_SP_REGNUM)
453 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
454 }
455 else if (tdep->is_m)
456 {
457 if (regnum == tdep->m_profile_msp_regnum)
458 return cache->msp_s;
459 if (regnum == tdep->m_profile_psp_regnum)
460 return cache->psp_s;
461 if (regnum == ARM_SP_REGNUM)
462 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
463 }
464 else if (regnum == ARM_SP_REGNUM)
465 return cache->sp;
466
467 gdb_assert_not_reached ("Invalid SP selection");
468 }
469
470 /* Return the previous stack address, depending on which SP register
471 is active. */
472
473 static CORE_ADDR
474 arm_cache_get_prev_sp_value (struct arm_prologue_cache *cache, arm_gdbarch_tdep *tdep)
475 {
476 CORE_ADDR val = arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
477 return val;
478 }
479
480 /* Set the active stack pointer to VAL. */
481
482 static void
483 arm_cache_set_active_sp_value (struct arm_prologue_cache *cache,
484 arm_gdbarch_tdep *tdep, CORE_ADDR val)
485 {
486 if (tdep->have_sec_ext)
487 {
488 if (cache->active_sp_regnum == tdep->m_profile_msp_s_regnum)
489 cache->msp_s = val;
490 else if (cache->active_sp_regnum == tdep->m_profile_msp_ns_regnum)
491 cache->msp_ns = val;
492 else if (cache->active_sp_regnum == tdep->m_profile_psp_s_regnum)
493 cache->psp_s = val;
494 else if (cache->active_sp_regnum == tdep->m_profile_psp_ns_regnum)
495 cache->psp_ns = val;
496
497 return;
498 }
499 else if (tdep->is_m)
500 {
501 if (cache->active_sp_regnum == tdep->m_profile_msp_regnum)
502 cache->msp_s = val;
503 else if (cache->active_sp_regnum == tdep->m_profile_psp_regnum)
504 cache->psp_s = val;
505
506 return;
507 }
508 else if (cache->active_sp_regnum == ARM_SP_REGNUM)
509 {
510 cache->sp = val;
511 return;
512 }
513
514 gdb_assert_not_reached ("Invalid SP selection");
515 }
516
517 /* Return true if REGNUM is one of the alternative stack pointers. */
518
519 static bool
520 arm_is_alternative_sp_register (arm_gdbarch_tdep *tdep, int regnum)
521 {
522 if ((regnum == tdep->m_profile_msp_regnum)
523 || (regnum == tdep->m_profile_msp_s_regnum)
524 || (regnum == tdep->m_profile_msp_ns_regnum)
525 || (regnum == tdep->m_profile_psp_regnum)
526 || (regnum == tdep->m_profile_psp_s_regnum)
527 || (regnum == tdep->m_profile_psp_ns_regnum))
528 return true;
529 else
530 return false;
531 }
532
533 /* Set the active stack pointer to SP_REGNUM. */
534
535 static void
536 arm_cache_switch_prev_sp (struct arm_prologue_cache *cache,
537 arm_gdbarch_tdep *tdep, int sp_regnum)
538 {
539 gdb_assert (arm_is_alternative_sp_register (tdep, sp_regnum));
540
541 if (tdep->have_sec_ext)
542 {
543 gdb_assert (sp_regnum != tdep->m_profile_msp_regnum
544 && sp_regnum != tdep->m_profile_psp_regnum);
545
546 if (sp_regnum == tdep->m_profile_msp_s_regnum
547 || sp_regnum == tdep->m_profile_psp_s_regnum)
548 {
549 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
550 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
551 }
552 else if (sp_regnum == tdep->m_profile_msp_ns_regnum
553 || sp_regnum == tdep->m_profile_psp_ns_regnum)
554 {
555 cache->active_msp_regnum = tdep->m_profile_msp_ns_regnum;
556 cache->active_psp_regnum = tdep->m_profile_psp_ns_regnum;
557 }
558 }
559
560 cache->active_sp_regnum = sp_regnum;
561 }
562
563 namespace {
564
565 /* Abstract class to read ARM instructions from memory. */
566
567 class arm_instruction_reader
568 {
569 public:
 570   /* Read a 4-byte instruction from memory using the BYTE_ORDER endianness.  */
571 virtual uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const = 0;
572 };
573
574 /* Read instructions from target memory. */
575
576 class target_arm_instruction_reader : public arm_instruction_reader
577 {
578 public:
579 uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override
580 {
581 return read_code_unsigned_integer (memaddr, 4, byte_order);
582 }
583 };
584
585 } /* namespace */
586
587 static CORE_ADDR arm_analyze_prologue
588 (struct gdbarch *gdbarch, CORE_ADDR prologue_start, CORE_ADDR prologue_end,
589 struct arm_prologue_cache *cache, const arm_instruction_reader &insn_reader);
590
 591 /* Architecture version for displaced stepping.  This affects the behavior of
 592    certain instructions, and really should not be hard-wired.  */
593
594 #define DISPLACED_STEPPING_ARCH_VERSION 5
595
596 /* See arm-tdep.h. */
597
598 bool arm_apcs_32 = true;
599 bool arm_unwind_secure_frames = true;
600
601 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
602
603 int
604 arm_psr_thumb_bit (struct gdbarch *gdbarch)
605 {
606 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
607
608 if (tdep->is_m)
609 return XPSR_T;
610 else
611 return CPSR_T;
612 }
613
614 /* Determine if the processor is currently executing in Thumb mode. */
615
616 int
617 arm_is_thumb (struct regcache *regcache)
618 {
619 ULONGEST cpsr;
620 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
621
622 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
623
624 return (cpsr & t_bit) != 0;
625 }
626
627 /* Determine if FRAME is executing in Thumb mode. FRAME must be an ARM
628 frame. */
629
630 int
631 arm_frame_is_thumb (const frame_info_ptr &frame)
632 {
633 /* Check the architecture of FRAME. */
634 struct gdbarch *gdbarch = get_frame_arch (frame);
635 gdb_assert (gdbarch_bfd_arch_info (gdbarch)->arch == bfd_arch_arm);
636
637 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
638 directly (from a signal frame or dummy frame) or by interpreting
639 the saved LR (from a prologue or DWARF frame). So consult it and
640 trust the unwinders. */
641 CORE_ADDR cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
642
643 /* Find and extract the thumb bit. */
644 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
645 return (cpsr & t_bit) != 0;
646 }
647
648 /* Search for the mapping symbol covering MEMADDR. If one is found,
649 return its type. Otherwise, return 0. If START is non-NULL,
650 set *START to the location of the mapping symbol. */
651
652 static char
653 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
654 {
655 struct obj_section *sec;
656
657 /* If there are mapping symbols, consult them. */
658 sec = find_pc_section (memaddr);
659 if (sec != NULL)
660 {
661 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd.get ());
662 if (data != NULL)
663 {
664 unsigned int section_idx = sec->the_bfd_section->index;
665 arm_mapping_symbol_vec &map
666 = data->section_maps[section_idx];
667
668 /* Sort the vector on first use. */
669 if (!data->section_maps_sorted[section_idx])
670 {
671 std::sort (map.begin (), map.end ());
672 data->section_maps_sorted[section_idx] = true;
673 }
674
675 arm_mapping_symbol map_key = { memaddr - sec->addr (), 0 };
676 arm_mapping_symbol_vec::const_iterator it
677 = std::lower_bound (map.begin (), map.end (), map_key);
678
679 /* std::lower_bound finds the earliest ordered insertion
680 point. If the symbol at this position starts at this exact
681 address, we use that; otherwise, the preceding
682 mapping symbol covers this address. */
683 if (it < map.end ())
684 {
685 if (it->value == map_key.value)
686 {
687 if (start)
688 *start = it->value + sec->addr ();
689 return it->type;
690 }
691 }
692
693 if (it > map.begin ())
694 {
695 arm_mapping_symbol_vec::const_iterator prev_it
696 = it - 1;
697
698 if (start)
699 *start = prev_it->value + sec->addr ();
700 return prev_it->type;
701 }
702 }
703 }
704
705 return 0;
706 }
707
708 /* Determine if the program counter specified in MEMADDR is in a Thumb
709 function. This function should be called for addresses unrelated to
710 any executing frame; otherwise, prefer arm_frame_is_thumb. */
711
712 int
713 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
714 {
715 char type;
716 arm_displaced_step_copy_insn_closure *dsc = nullptr;
717 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
718
719 if (gdbarch_displaced_step_copy_insn_closure_by_addr_p (gdbarch))
720 dsc = ((arm_displaced_step_copy_insn_closure * )
721 gdbarch_displaced_step_copy_insn_closure_by_addr
722 (gdbarch, current_inferior (), memaddr));
723
 724   /* If we are checking the mode of a displaced instruction in the copy area,
 725      the mode should be determined from the instruction at the original address.  */
726 if (dsc)
727 {
728 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
729 (unsigned long) dsc->insn_addr,
730 (unsigned long) memaddr);
731 memaddr = dsc->insn_addr;
732 }
733
734 /* If bit 0 of the address is set, assume this is a Thumb address. */
735 if (IS_THUMB_ADDR (memaddr))
736 return 1;
737
 738   /* If the user wants to override the symbol table, let them.  */
739 if (strcmp (arm_force_mode_string, "arm") == 0)
740 return 0;
741 if (strcmp (arm_force_mode_string, "thumb") == 0)
742 return 1;
743
744 /* ARM v6-M and v7-M are always in Thumb mode. */
745 if (tdep->is_m)
746 return 1;
747
748 /* If there are mapping symbols, consult them. */
749 type = arm_find_mapping_symbol (memaddr, NULL);
750 if (type)
751 return type == 't';
752
753 /* Thumb functions have a "special" bit set in minimal symbols. */
754 bound_minimal_symbol sym = lookup_minimal_symbol_by_pc (memaddr);
755 if (sym.minsym)
756 return (MSYMBOL_IS_SPECIAL (sym.minsym));
757
758 /* If the user wants to override the fallback mode, let them. */
759 if (strcmp (arm_fallback_mode_string, "arm") == 0)
760 return 0;
761 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
762 return 1;
763
764 /* If we couldn't find any symbol, but we're talking to a running
765 target, then trust the current value of $cpsr. This lets
766 "display/i $pc" always show the correct mode (though if there is
767 a symbol table we will not reach here, so it still may not be
768 displayed in the mode it will be executed). */
769 if (target_has_registers ())
770 return arm_frame_is_thumb (get_current_frame ());
771
772 /* Otherwise we're out of luck; we assume ARM. */
773 return 0;
774 }
775
776 static inline bool
777 arm_m_addr_is_lockup (CORE_ADDR addr)
778 {
779 switch (addr)
780 {
781 /* Values for lockup state.
782 For more details see "B1.5.15 Unrecoverable exception cases" in
783 both ARMv6-M and ARMv7-M Architecture Reference Manuals, or
784 see "B4.32 Lockup" in ARMv8-M Architecture Reference Manual. */
785 case 0xeffffffe:
786 case 0xfffffffe:
787 case 0xffffffff:
788 return true;
789
790 default:
791 /* Address is not lockup. */
792 return false;
793 }
794 }
795
796 /* Determine if the address specified equals any of these magic return
797 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
798 architectures. Also include lockup magic PC value.
799 Check also for FNC_RETURN if we have the v8-M security extension.
800
801 From ARMv6-M Reference Manual B1.5.8
802 Table B1-5 Exception return behavior
803
804 EXC_RETURN Return To Return Stack
805 0xFFFFFFF1 Handler mode Main
806 0xFFFFFFF9 Thread mode Main
807 0xFFFFFFFD Thread mode Process
808
809 From ARMv7-M Reference Manual B1.5.8
810 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
811
812 EXC_RETURN Return To Return Stack
813 0xFFFFFFF1 Handler mode Main
814 0xFFFFFFF9 Thread mode Main
815 0xFFFFFFFD Thread mode Process
816
817 Table B1-9 EXC_RETURN definition of exception return behavior, with
818 FP
819
820 EXC_RETURN Return To Return Stack Frame Type
821 0xFFFFFFE1 Handler mode Main Extended
822 0xFFFFFFE9 Thread mode Main Extended
823 0xFFFFFFED Thread mode Process Extended
824 0xFFFFFFF1 Handler mode Main Basic
825 0xFFFFFFF9 Thread mode Main Basic
826 0xFFFFFFFD Thread mode Process Basic
827
828 For more details see "B1.5.8 Exception return behavior"
829 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
830
831 From ARMv8-M Architecture Technical Reference, D1.2.95
832 FType, Mode and SPSEL bits are to be considered when the Security
833 Extension is not implemented.
834
835 EXC_RETURN Return To Return Stack Frame Type
836 0xFFFFFFA0 Handler mode Main Extended
837 0xFFFFFFA8 Thread mode Main Extended
838 0xFFFFFFAC Thread mode Process Extended
839 0xFFFFFFB0 Handler mode Main Standard
840 0xFFFFFFB8 Thread mode Main Standard
841 0xFFFFFFBC Thread mode Process Standard */
842
843 static int
844 arm_m_addr_is_magic (struct gdbarch *gdbarch, CORE_ADDR addr)
845 {
846 if (arm_m_addr_is_lockup (addr))
847 return 1;
848
849 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
850 if (tdep->have_sec_ext)
851 {
852 switch ((addr & 0xff000000))
853 {
854 case 0xff000000: /* EXC_RETURN pattern. */
855 case 0xfe000000: /* FNC_RETURN pattern. */
856 return 1;
857 default:
858 return 0;
859 }
860 }
861 else
862 {
863 switch (addr)
864 {
865 /* Values from ARMv8-M Architecture Technical Reference. */
866 case 0xffffffa0:
867 case 0xffffffa8:
868 case 0xffffffac:
869 case 0xffffffb0:
870 case 0xffffffb8:
871 case 0xffffffbc:
872 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
873 the exception return behavior. */
874 case 0xffffffe1:
875 case 0xffffffe9:
876 case 0xffffffed:
877 case 0xfffffff1:
878 case 0xfffffff9:
879 case 0xfffffffd:
880 /* Address is magic. */
881 return 1;
882
883 default:
884 /* Address is not magic. */
885 return 0;
886 }
887 }
888 }
889
890 /* Remove useless bits from addresses in a running program. */
891 static CORE_ADDR
892 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
893 {
894 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
895
896 /* On M-profile devices, do not strip the low bit from EXC_RETURN
897 (the magic exception return address). */
898 if (tdep->is_m && arm_m_addr_is_magic (gdbarch, val))
899 return val;
900
901 if (arm_apcs_32)
902 return UNMAKE_THUMB_ADDR (val);
903 else
904 return (val & 0x03fffffc);
905 }
906
907 /* Return 1 if PC is the start of a compiler helper function which
908 can be safely ignored during prologue skipping. IS_THUMB is true
909 if the function is known to be a Thumb function due to the way it
910 is being called. */
911 static int
912 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
913 {
914 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
915
916 bound_minimal_symbol msym = lookup_minimal_symbol_by_pc (pc);
917 if (msym.minsym != NULL
918 && msym.value_address () == pc
919 && msym.minsym->linkage_name () != NULL)
920 {
921 const char *name = msym.minsym->linkage_name ();
922
923 /* The GNU linker's Thumb call stub to foo is named
924 __foo_from_thumb. */
925 if (strstr (name, "_from_thumb") != NULL)
926 name += 2;
927
928 /* On soft-float targets, __truncdfsf2 is called to convert promoted
929 arguments to their argument types in non-prototyped
930 functions. */
931 if (startswith (name, "__truncdfsf2"))
932 return 1;
933 if (startswith (name, "__aeabi_d2f"))
934 return 1;
935
936 /* Internal functions related to thread-local storage. */
937 if (startswith (name, "__tls_get_addr"))
938 return 1;
939 if (startswith (name, "__aeabi_read_tp"))
940 return 1;
941 }
942 else
943 {
944 /* If we run against a stripped glibc, we may be unable to identify
945 special functions by name. Check for one important case,
946 __aeabi_read_tp, by comparing the *code* against the default
947 implementation (this is hand-written ARM assembler in glibc). */
948
949 if (!is_thumb
950 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
951 == 0xe3e00a0f /* mov r0, #0xffff0fff */
952 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
953 == 0xe240f01f) /* sub pc, r0, #31 */
954 return 1;
955 }
956
957 return 0;
958 }
959
 960 /* Extract the immediate from a movw/movt instruction of encoding T.  INSN1 is
 961    the first 16 bits of the instruction, and INSN2 is the second 16 bits of
 962    the instruction.  */
963 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
964 ((bits ((insn1), 0, 3) << 12) \
965 | (bits ((insn1), 10, 10) << 11) \
966 | (bits ((insn2), 12, 14) << 8) \
967 | bits ((insn2), 0, 7))
968
 969 /* Extract the immediate from a movw/movt instruction of encoding A.  INSN is
 970    the 32-bit instruction.  */
971 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
972 ((bits ((insn), 16, 19) << 12) \
973 | bits ((insn), 0, 11))
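/* As an illustration, derived from the macro above (the example encoding is
   given for the A1 movw form): for INSN 0xe30f0fff, i.e. "movw r0, #0xffff",
   EXTRACT_MOVW_MOVT_IMM_A yields (0xf << 12) | 0xfff == 0xffff.  */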
974
975 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
976
977 static unsigned int
978 thumb_expand_immediate (unsigned int imm)
979 {
980 unsigned int count = imm >> 7;
981
982 if (count < 8)
983 switch (count / 2)
984 {
985 case 0:
986 return imm & 0xff;
987 case 1:
988 return (imm & 0xff) | ((imm & 0xff) << 16);
989 case 2:
990 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
991 case 3:
992 return (imm & 0xff) | ((imm & 0xff) << 8)
993 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
994 }
995
996 return (0x80 | (imm & 0x7f)) << (32 - count);
997 }
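/* Worked examples, derived from the code above rather than from the ARM
   Architecture Reference Manual: an IMM of 0x0ab expands to 0x000000ab,
   0x1ab to 0x00ab00ab, 0x2ab to 0xab00ab00, 0x3ab to 0xabababab, and
   0x4ab (count >= 8, a rotated 8-bit value) to 0xab << 23 == 0x55800000.  */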
998
999 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
1000 epilogue, 0 otherwise. */
1001
1002 static int
1003 thumb_instruction_restores_sp (unsigned short insn)
1004 {
1005 return (insn == 0x46bd /* mov sp, r7 */
1006 || (insn & 0xff80) == 0xb000 /* add sp, imm */
1007 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
1008 }
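/* For example (values derived from the encodings above): 0xb008 ("add sp, #32")
   matches the second test and 0xbd00 ("pop {pc}") matches the third, so both
   are treated as epilogue instructions.  */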
1009
1010 /* Analyze a Thumb prologue, looking for a recognizable stack frame
1011 and frame pointer. Scan until we encounter a store that could
1012 clobber the stack frame unexpectedly, or an unknown instruction.
1013 Return the last address which is definitely safe to skip for an
1014 initial breakpoint. */
1015
1016 static CORE_ADDR
1017 thumb_analyze_prologue (struct gdbarch *gdbarch,
1018 CORE_ADDR start, CORE_ADDR limit,
1019 struct arm_prologue_cache *cache)
1020 {
1021 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
1022 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1023 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1024 int i;
1025 pv_t regs[16];
1026 CORE_ADDR offset;
1027 CORE_ADDR unrecognized_pc = 0;
1028
1029 for (i = 0; i < 16; i++)
1030 regs[i] = pv_register (i, 0);
1031 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1032
1033 while (start < limit)
1034 {
1035 unsigned short insn;
1036 std::optional<bool> ra_signed_state;
1037
1038 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
1039
1040 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
1041 {
1042 int regno;
1043 int mask;
1044
1045 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1046 break;
1047
1048 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
1049 whether to save LR (R14). */
1050 mask = (insn & 0xff) | ((insn & 0x100) << 6);
1051
1052 /* Calculate offsets of saved R0-R7 and LR. */
1053 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
1054 if (mask & (1 << regno))
1055 {
1056 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
1057 -4);
1058 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1059 }
1060 }
1061 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
1062 {
1063 offset = (insn & 0x7f) << 2; /* get scaled offset */
1064 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
1065 -offset);
1066 }
1067 else if (thumb_instruction_restores_sp (insn))
1068 {
1069 /* Don't scan past the epilogue. */
1070 break;
1071 }
1072 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
1073 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
1074 (insn & 0xff) << 2);
1075 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
1076 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
1077 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
1078 bits (insn, 6, 8));
1079 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
1080 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1081 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
1082 bits (insn, 0, 7));
1083 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
1084 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
1085 && pv_is_constant (regs[bits (insn, 3, 5)]))
1086 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
1087 regs[bits (insn, 6, 8)]);
1088 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
1089 && pv_is_constant (regs[bits (insn, 3, 6)]))
1090 {
1091 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
1092 int rm = bits (insn, 3, 6);
1093 regs[rd] = pv_add (regs[rd], regs[rm]);
1094 }
1095 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
1096 {
1097 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
1098 int src_reg = (insn & 0x78) >> 3;
1099 regs[dst_reg] = regs[src_reg];
1100 }
1101 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
1102 {
1103 /* Handle stores to the stack. Normally pushes are used,
1104 but with GCC -mtpcs-frame, there may be other stores
1105 in the prologue to create the frame. */
1106 int regno = (insn >> 8) & 0x7;
1107 pv_t addr;
1108
1109 offset = (insn & 0xff) << 2;
1110 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
1111
1112 if (stack.store_would_trash (addr))
1113 break;
1114
1115 stack.store (addr, 4, regs[regno]);
1116 }
1117 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
1118 {
1119 int rd = bits (insn, 0, 2);
1120 int rn = bits (insn, 3, 5);
1121 pv_t addr;
1122
1123 offset = bits (insn, 6, 10) << 2;
1124 addr = pv_add_constant (regs[rn], offset);
1125
1126 if (stack.store_would_trash (addr))
1127 break;
1128
1129 stack.store (addr, 4, regs[rd]);
1130 }
1131 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
1132 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
1133 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
1134 /* Ignore stores of argument registers to the stack. */
1135 ;
1136 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
1137 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1138 /* Ignore block loads from the stack, potentially copying
1139 parameters from memory. */
1140 ;
1141 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
1142 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
1143 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
1144 /* Similarly ignore single loads from the stack. */
1145 ;
1146 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
1147 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
1148 /* Skip register copies, i.e. saves to another register
1149 instead of the stack. */
1150 ;
1151 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
1152 /* Recognize constant loads; even with small stacks these are necessary
1153 on Thumb. */
1154 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
1155 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
1156 {
1157 /* Constant pool loads, for the same reason. */
1158 unsigned int constant;
1159 CORE_ADDR loc;
1160
1161 loc = start + 4 + bits (insn, 0, 7) * 4;
1162 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1163 regs[bits (insn, 8, 10)] = pv_constant (constant);
1164 }
1165 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
1166 {
1167 unsigned short inst2;
1168
1169 inst2 = read_code_unsigned_integer (start + 2, 2,
1170 byte_order_for_code);
1171 uint32_t whole_insn = (insn << 16) | inst2;
1172
1173 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
1174 {
1175 /* BL, BLX. Allow some special function calls when
1176 skipping the prologue; GCC generates these before
1177 storing arguments to the stack. */
1178 CORE_ADDR nextpc;
1179 int j1, j2, imm1, imm2;
1180
1181 imm1 = sbits (insn, 0, 10);
1182 imm2 = bits (inst2, 0, 10);
1183 j1 = bit (inst2, 13);
1184 j2 = bit (inst2, 11);
1185
1186 offset = ((imm1 << 12) + (imm2 << 1));
1187 offset ^= ((!j2) << 22) | ((!j1) << 23);
1188
1189 nextpc = start + 4 + offset;
1190 /* For BLX make sure to clear the low bits. */
1191 if (bit (inst2, 12) == 0)
1192 nextpc = nextpc & 0xfffffffc;
1193
1194 if (!skip_prologue_function (gdbarch, nextpc,
1195 bit (inst2, 12) != 0))
1196 break;
1197 }
1198
1199 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
1200 { registers } */
1201 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1202 {
1203 pv_t addr = regs[bits (insn, 0, 3)];
1204 int regno;
1205
1206 if (stack.store_would_trash (addr))
1207 break;
1208
1209 /* Calculate offsets of saved registers. */
1210 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
1211 if (inst2 & (1 << regno))
1212 {
1213 addr = pv_add_constant (addr, -4);
1214 stack.store (addr, 4, regs[regno]);
1215 }
1216
1217 if (insn & 0x0020)
1218 regs[bits (insn, 0, 3)] = addr;
1219 }
1220
1221 /* vstmdb Rn{!}, { D-registers } (aka vpush). */
1222 else if ((insn & 0xff20) == 0xed20
1223 && (inst2 & 0x0f00) == 0x0b00
1224 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1225 {
1226 /* Address SP points to. */
1227 pv_t addr = regs[bits (insn, 0, 3)];
1228
1229 /* Number of registers saved. */
1230 unsigned int number = bits (inst2, 0, 7) >> 1;
1231
1232 /* First register to save. */
1233 int vd = bits (inst2, 12, 15) | (bits (insn, 6, 6) << 4);
1234
1235 if (stack.store_would_trash (addr))
1236 break;
1237
1238 /* Calculate offsets of saved registers. */
1239 for (; number > 0; number--)
1240 {
1241 addr = pv_add_constant (addr, -8);
1242 stack.store (addr, 8, pv_register (ARM_D0_REGNUM
1243 + vd + number, 0));
1244 }
1245
1246 /* Writeback SP to account for the saved registers. */
1247 regs[bits (insn, 0, 3)] = addr;
1248 }
1249
1250 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
1251 [Rn, #+/-imm]{!} */
1252 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1253 {
1254 int regno1 = bits (inst2, 12, 15);
1255 int regno2 = bits (inst2, 8, 11);
1256 pv_t addr = regs[bits (insn, 0, 3)];
1257
1258 offset = inst2 & 0xff;
1259 if (insn & 0x0080)
1260 addr = pv_add_constant (addr, offset);
1261 else
1262 addr = pv_add_constant (addr, -offset);
1263
1264 if (stack.store_would_trash (addr))
1265 break;
1266
1267 stack.store (addr, 4, regs[regno1]);
1268 stack.store (pv_add_constant (addr, 4),
1269 4, regs[regno2]);
1270
1271 if (insn & 0x0020)
1272 regs[bits (insn, 0, 3)] = addr;
1273 }
1274
1275 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
1276 && (inst2 & 0x0c00) == 0x0c00
1277 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1278 {
1279 int regno = bits (inst2, 12, 15);
1280 pv_t addr = regs[bits (insn, 0, 3)];
1281
1282 offset = inst2 & 0xff;
1283 if (inst2 & 0x0200)
1284 addr = pv_add_constant (addr, offset);
1285 else
1286 addr = pv_add_constant (addr, -offset);
1287
1288 if (stack.store_would_trash (addr))
1289 break;
1290
1291 stack.store (addr, 4, regs[regno]);
1292
1293 if (inst2 & 0x0100)
1294 regs[bits (insn, 0, 3)] = addr;
1295 }
1296
1297 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
1298 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1299 {
1300 int regno = bits (inst2, 12, 15);
1301 pv_t addr;
1302
1303 offset = inst2 & 0xfff;
1304 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
1305
1306 if (stack.store_would_trash (addr))
1307 break;
1308
1309 stack.store (addr, 4, regs[regno]);
1310 }
1311
1312 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
1313 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1314 /* Ignore stores of argument registers to the stack. */
1315 ;
1316
1317 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
1318 && (inst2 & 0x0d00) == 0x0c00
1319 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1320 /* Ignore stores of argument registers to the stack. */
1321 ;
1322
1323 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
1324 { registers } */
1325 && (inst2 & 0x8000) == 0x0000
1326 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1327 /* Ignore block loads from the stack, potentially copying
1328 parameters from memory. */
1329 ;
1330
1331 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
1332 [Rn, #+/-imm] */
1333 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1334 /* Similarly ignore dual loads from the stack. */
1335 ;
1336
1337 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
1338 && (inst2 & 0x0d00) == 0x0c00
1339 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1340 /* Similarly ignore single loads from the stack. */
1341 ;
1342
1343 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1344 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1345 /* Similarly ignore single loads from the stack. */
1346 ;
1347
1348 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1349 && (inst2 & 0x8000) == 0x0000)
1350 {
1351 unsigned int imm = ((bits (insn, 10, 10) << 11)
1352 | (bits (inst2, 12, 14) << 8)
1353 | bits (inst2, 0, 7));
1354
1355 regs[bits (inst2, 8, 11)]
1356 = pv_add_constant (regs[bits (insn, 0, 3)],
1357 thumb_expand_immediate (imm));
1358 }
1359
1360 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1361 && (inst2 & 0x8000) == 0x0000)
1362 {
1363 unsigned int imm = ((bits (insn, 10, 10) << 11)
1364 | (bits (inst2, 12, 14) << 8)
1365 | bits (inst2, 0, 7));
1366
1367 regs[bits (inst2, 8, 11)]
1368 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1369 }
1370
1371 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1372 && (inst2 & 0x8000) == 0x0000)
1373 {
1374 unsigned int imm = ((bits (insn, 10, 10) << 11)
1375 | (bits (inst2, 12, 14) << 8)
1376 | bits (inst2, 0, 7));
1377
1378 regs[bits (inst2, 8, 11)]
1379 = pv_add_constant (regs[bits (insn, 0, 3)],
1380 - (CORE_ADDR) thumb_expand_immediate (imm));
1381 }
1382
1383 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1384 && (inst2 & 0x8000) == 0x0000)
1385 {
1386 unsigned int imm = ((bits (insn, 10, 10) << 11)
1387 | (bits (inst2, 12, 14) << 8)
1388 | bits (inst2, 0, 7));
1389
1390 regs[bits (inst2, 8, 11)]
1391 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1392 }
1393
1394 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1395 {
1396 unsigned int imm = ((bits (insn, 10, 10) << 11)
1397 | (bits (inst2, 12, 14) << 8)
1398 | bits (inst2, 0, 7));
1399
1400 regs[bits (inst2, 8, 11)]
1401 = pv_constant (thumb_expand_immediate (imm));
1402 }
1403
1404 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1405 {
1406 unsigned int imm
1407 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1408
1409 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1410 }
1411
1412 else if (insn == 0xea5f /* mov.w Rd,Rm */
1413 && (inst2 & 0xf0f0) == 0)
1414 {
1415 int dst_reg = (inst2 & 0x0f00) >> 8;
1416 int src_reg = inst2 & 0xf;
1417 regs[dst_reg] = regs[src_reg];
1418 }
1419
1420 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1421 {
1422 /* Constant pool loads. */
1423 unsigned int constant;
1424 CORE_ADDR loc;
1425
1426 offset = bits (inst2, 0, 11);
1427 if (insn & 0x0080)
1428 loc = start + 4 + offset;
1429 else
1430 loc = start + 4 - offset;
1431
1432 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1433 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1434 }
1435
1436 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1437 {
1438 /* Constant pool loads. */
1439 unsigned int constant;
1440 CORE_ADDR loc;
1441
1442 offset = bits (inst2, 0, 7) << 2;
1443 if (insn & 0x0080)
1444 loc = start + 4 + offset;
1445 else
1446 loc = start + 4 - offset;
1447
1448 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1449 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1450
1451 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1452 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1453 }
1454 /* Start of ARMv8.1-m PACBTI extension instructions. */
1455 else if (IS_PAC (whole_insn))
1456 {
1457 /* LR and SP are input registers. PAC is in R12. LR is
1458 signed from this point onwards. NOP space. */
1459 ra_signed_state = true;
1460 }
1461 else if (IS_PACBTI (whole_insn))
1462 {
1463 /* LR and SP are input registers. PAC is in R12 and PC is a
1464 valid BTI landing pad. LR is signed from this point onwards.
1465 NOP space. */
1466 ra_signed_state = true;
1467 }
1468 else if (IS_BTI (whole_insn))
1469 {
1470 /* Valid BTI landing pad. NOP space. */
1471 }
1472 else if (IS_PACG (whole_insn))
1473 {
1474 /* Sign Rn using Rm and store the PAC in Rd. Rd is signed from
1475 this point onwards. */
1476 ra_signed_state = true;
1477 }
1478 else if (IS_AUT (whole_insn) || IS_AUTG (whole_insn))
1479 {
1480 /* These instructions appear close to the epilogue, when signed
1481 pointers are getting authenticated. */
1482 ra_signed_state = false;
1483 }
1484 /* End of ARMv8.1-m PACBTI extension instructions */
1485 else if (thumb2_instruction_changes_pc (insn, inst2))
1486 {
1487 /* Don't scan past anything that might change control flow. */
1488 break;
1489 }
1490 else
1491 {
1492 /* The optimizer might shove anything into the prologue,
1493 so we just skip what we don't recognize. */
1494 unrecognized_pc = start;
1495 }
1496
1497 /* Make sure we are dealing with a target that supports ARMv8.1-m
1498 PACBTI. */
1499 if (cache != nullptr && tdep->have_pacbti
1500 && ra_signed_state.has_value ())
1501 {
1502 arm_debug_printf ("Found pacbti instruction at %s",
1503 paddress (gdbarch, start));
1504 arm_debug_printf ("RA is %s",
1505 *ra_signed_state ? "signed" : "not signed");
1506 cache->ra_signed_state = ra_signed_state;
1507 }
1508
1509 start += 2;
1510 }
1511 else if (thumb_instruction_changes_pc (insn))
1512 {
1513 /* Don't scan past anything that might change control flow. */
1514 break;
1515 }
1516 else
1517 {
1518 /* The optimizer might shove anything into the prologue,
1519 so we just skip what we don't recognize. */
1520 unrecognized_pc = start;
1521 }
1522
1523 start += 2;
1524 }
1525
1526 arm_debug_printf ("Prologue scan stopped at %s",
1527 paddress (gdbarch, start));
1528
1529 if (unrecognized_pc == 0)
1530 unrecognized_pc = start;
1531
1532 if (cache == NULL)
1533 return unrecognized_pc;
1534
1535 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1536 {
1537 /* Frame pointer is fp. Frame size is constant. */
1538 cache->framereg = ARM_FP_REGNUM;
1539 cache->framesize = -regs[ARM_FP_REGNUM].k;
1540 }
1541 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1542 {
1543 /* Frame pointer is r7. Frame size is constant. */
1544 cache->framereg = THUMB_FP_REGNUM;
1545 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1546 }
1547 else
1548 {
1549 /* Try the stack pointer... this is a bit desperate. */
1550 cache->framereg = ARM_SP_REGNUM;
1551 cache->framesize = -regs[ARM_SP_REGNUM].k;
1552 }
1553
1554 for (i = 0; i < gdbarch_num_regs (gdbarch); i++)
1555 if (stack.find_reg (gdbarch, i, &offset))
1556 {
1557 cache->saved_regs[i].set_addr (offset);
1558 if (i == ARM_SP_REGNUM)
1559 arm_cache_set_active_sp_value(cache, tdep, offset);
1560 }
1561
1562 return unrecognized_pc;
1563 }
1564
1565
 1566 /* Try to analyze the instructions starting at PC, which load the symbol
 1567    __stack_chk_guard.  Return the address that the load refers to (the address
 1568    of __stack_chk_guard), set the destination register number in *DESTREG, and
 1569    set the size in bytes of the loading instructions in *OFFSET.  Return 0 if
 1570    the instructions are not recognized.  */
1571
1572 static CORE_ADDR
1573 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1574 unsigned int *destreg, int *offset)
1575 {
1576 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1577 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1578 unsigned int low, high, address;
1579
1580 address = 0;
1581 if (is_thumb)
1582 {
1583 unsigned short insn1
1584 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1585
1586 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1587 {
1588 *destreg = bits (insn1, 8, 10);
1589 *offset = 2;
1590 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1591 address = read_memory_unsigned_integer (address, 4,
1592 byte_order_for_code);
1593 }
1594 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1595 {
1596 unsigned short insn2
1597 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1598
1599 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1600
1601 insn1
1602 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1603 insn2
1604 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1605
1606 /* movt Rd, #const */
1607 if ((insn1 & 0xfbc0) == 0xf2c0)
1608 {
1609 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1610 *destreg = bits (insn2, 8, 11);
1611 *offset = 8;
1612 address = (high << 16 | low);
1613 }
1614 }
1615 }
1616 else
1617 {
1618 unsigned int insn
1619 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1620
1621 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1622 {
1623 address = bits (insn, 0, 11) + pc + 8;
1624 address = read_memory_unsigned_integer (address, 4,
1625 byte_order_for_code);
1626
1627 *destreg = bits (insn, 12, 15);
1628 *offset = 4;
1629 }
1630 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1631 {
1632 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1633
1634 insn
1635 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1636
1637 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1638 {
1639 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1640 *destreg = bits (insn, 12, 15);
1641 *offset = 8;
1642 address = (high << 16 | low);
1643 }
1644 }
1645 }
1646
1647 return address;
1648 }
1649
 1650 /* Try to skip the sequence of instructions used by the stack protector.  If PC
 1651    points to the first instruction of this sequence, return the address of the
 1652    first instruction after this sequence; otherwise, return the original PC.
1653
 1654    On arm, this sequence of instructions is composed mainly of three steps:
1655 Step 1: load symbol __stack_chk_guard,
1656 Step 2: load from address of __stack_chk_guard,
1657 Step 3: store it to somewhere else.
1658
 1659    Usually, the instructions in step 2 and step 3 are the same across ARM
 1660    architectures.  In step 2, it is a single instruction 'ldr Rx, [Rn, #0]', and
 1661    in step 3, it is also a single instruction 'str Rx, [r7, #immd]'.  However,
 1662    the instructions in step 1 vary between ARM architectures.  On ARMv7,
 1663    they are,
1664
1665 movw Rn, #:lower16:__stack_chk_guard
1666 movt Rn, #:upper16:__stack_chk_guard
1667
1668 On ARMv5t, it is,
1669
1670 ldr Rn, .Label
1671 ....
1672 .Label:
1673 .word __stack_chk_guard
1674
 1675    Since ldr/str are very common instructions, we can't use them alone as the
 1676    'fingerprint' or 'signature' of the stack protector sequence.  Here we choose
 1677    the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if
 1678    not stripped, as the 'fingerprint' of a stack protector code sequence.  */
1679
1680 static CORE_ADDR
1681 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1682 {
1683 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1684 unsigned int basereg;
1685 int offset;
1686 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1687 CORE_ADDR addr;
1688
1689 /* Try to parse the instructions in Step 1. */
1690 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1691 &basereg, &offset);
1692 if (!addr)
1693 return pc;
1694
1695 bound_minimal_symbol stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1696 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
 1697      Otherwise, this sequence cannot be for the stack protector.  */
1698 if (stack_chk_guard.minsym == NULL
1699 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1700 return pc;
1701
1702 if (is_thumb)
1703 {
1704 unsigned int destreg;
1705 unsigned short insn
1706 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1707
1708 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1709 if ((insn & 0xf800) != 0x6800)
1710 return pc;
1711 if (bits (insn, 3, 5) != basereg)
1712 return pc;
1713 destreg = bits (insn, 0, 2);
1714
1715 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1716 byte_order_for_code);
1717 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1718 if ((insn & 0xf800) != 0x6000)
1719 return pc;
1720 if (destreg != bits (insn, 0, 2))
1721 return pc;
1722 }
1723 else
1724 {
1725 unsigned int destreg;
1726 unsigned int insn
1727 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1728
1729 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1730 if ((insn & 0x0e500000) != 0x04100000)
1731 return pc;
1732 if (bits (insn, 16, 19) != basereg)
1733 return pc;
1734 destreg = bits (insn, 12, 15);
1735 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1736 insn = read_code_unsigned_integer (pc + offset + 4,
1737 4, byte_order_for_code);
1738 if ((insn & 0x0e500000) != 0x04000000)
1739 return pc;
1740 if (bits (insn, 12, 15) != destreg)
1741 return pc;
1742 }
 1743   /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
 1744      and 8 bytes on ARM.  */
1745 if (is_thumb)
1746 return pc + offset + 4;
1747 else
1748 return pc + offset + 8;
1749 }
1750
1751 /* Advance the PC across any function entry prologue instructions to
1752 reach some "real" code.
1753
1754 The APCS (ARM Procedure Call Standard) defines the following
1755 prologue:
1756
1757 mov ip, sp
1758 [stmfd sp!, {a1,a2,a3,a4}]
1759 stmfd sp!, {...,fp,ip,lr,pc}
1760 [stfe f7, [sp, #-12]!]
1761 [stfe f6, [sp, #-12]!]
1762 [stfe f5, [sp, #-12]!]
1763 [stfe f4, [sp, #-12]!]
1764 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1765
1766 static CORE_ADDR
1767 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1768 {
1769 CORE_ADDR func_addr, func_end_addr, limit_pc;
1770
1771 /* See if we can determine the end of the prologue via the symbol table.
1772 If so, then return either PC, or the PC after the prologue, whichever
1773 is greater. */
1774 bool func_addr_found
1775 = find_pc_partial_function (pc, NULL, &func_addr, &func_end_addr);
1776
1777 /* Whether the function is thumb mode or not. */
1778 bool func_is_thumb = false;
1779
1780 if (func_addr_found)
1781 {
1782 CORE_ADDR post_prologue_pc
1783 = skip_prologue_using_sal (gdbarch, func_addr);
1784 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1785
1786 if (post_prologue_pc)
1787 post_prologue_pc
1788 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1789
1790
1791 /* GCC always emits a line note before the prologue and another
1792 one after, even if the two are at the same address or on the
1793 same line. Take advantage of this so that we do not need to
1794 know every instruction that might appear in the prologue. We
1795 will have producer information for most binaries; if it is
 1796          missing (e.g. for -gstabs), assume the GNU tools.  */
1797 if (post_prologue_pc
1798 && (cust == NULL
1799 || cust->producer () == NULL
1800 || startswith (cust->producer (), "GNU ")
1801 || producer_is_llvm (cust->producer ())))
1802 return post_prologue_pc;
1803
1804 if (post_prologue_pc != 0)
1805 {
1806 CORE_ADDR analyzed_limit;
1807
1808 /* For non-GCC compilers, make sure the entire line is an
1809 acceptable prologue; GDB will round this function's
1810 return value up to the end of the following line so we
1811 can not skip just part of a line (and we do not want to).
1812
1813 RealView does not treat the prologue specially, but does
1814 associate prologue code with the opening brace; so this
1815 lets us skip the first line if we think it is the opening
1816 brace. */
1817 func_is_thumb = arm_pc_is_thumb (gdbarch, func_addr);
1818 if (func_is_thumb)
1819 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1820 post_prologue_pc, NULL);
1821 else
1822 analyzed_limit
1823 = arm_analyze_prologue (gdbarch, func_addr, post_prologue_pc,
1824 NULL, target_arm_instruction_reader ());
1825
1826 if (analyzed_limit != post_prologue_pc)
1827 return func_addr;
1828
1829 return post_prologue_pc;
1830 }
1831 }
1832
1833 /* Can't determine prologue from the symbol table, need to examine
1834 instructions. */
1835
1836 /* Find an upper limit on the function prologue using the debug
1837 information. If the debug information could not be used to provide
1838 that bound, then use an arbitrary large number as the upper bound. */
1839 /* Like arm_scan_prologue, stop no later than pc + 64. */
1840 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1841 if (limit_pc == 0)
1842 limit_pc = pc + 64; /* Magic. */
1843
1844 /* Set the correct adjustment based on whether the function is thumb mode or
1845 not. We use it to get the address of the last instruction in the
1846 function (as opposed to the first address of the next function). */
1847 CORE_ADDR adjustment = func_is_thumb ? 2 : 4;
1848
1849 limit_pc
1850 = func_end_addr == 0 ? limit_pc : std::min (limit_pc,
1851 func_end_addr - adjustment);
1852
1853 /* Check if this is Thumb code. */
1854 if (arm_pc_is_thumb (gdbarch, pc))
1855 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1856 else
1857 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL,
1858 target_arm_instruction_reader ());
1859 }
1860
1861 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1862 This function decodes a Thumb function prologue to determine:
1863 1) the size of the stack frame
1864 2) which registers are saved on it
1865 3) the offsets of saved regs
1866 4) the offset from the stack pointer to the frame pointer
1867
1868 A typical Thumb function prologue would create this stack frame
1869 (offsets relative to FP)
1870 old SP -> 24 stack parameters
1871 20 LR
1872 16 R7
1873 R7 -> 0 local variables (16 bytes)
1874 SP -> -12 additional stack space (12 bytes)
1875 The frame size would thus be 36 bytes, and the frame offset would be
1876 12 bytes. The frame register is R7.
1877
1878 The comments for thumb_skip_prolog() describe the algorithm we use
1879 to detect the end of the prologue. */
1880
1881 static void
1882 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1883 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1884 {
1885 CORE_ADDR prologue_start;
1886 CORE_ADDR prologue_end;
1887
1888 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1889 &prologue_end))
1890 {
1891 /* See comment in arm_scan_prologue for an explanation of
1892 this heuristic. */
1893 if (prologue_end > prologue_start + 64)
1894 {
1895 prologue_end = prologue_start + 64;
1896 }
1897 }
1898 else
1899 /* We're in the boondocks: we have no idea where the start of the
1900 function is. */
1901 return;
1902
1903 prologue_end = std::min (prologue_end, prev_pc);
1904
1905 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1906 }
1907
1908 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1909 otherwise. */
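 /* For example (illustrative encodings): 0xe28dd010, i.e. "add sp, sp, #16",
 and 0xe8bd8000, i.e. "pop {pc}" (an LDMIA of SP), both match the patterns
 tested below.  */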
1910
1911 static int
1912 arm_instruction_restores_sp (unsigned int insn)
1913 {
1914 if (bits (insn, 28, 31) != INST_NV)
1915 {
1916 if ((insn & 0x0df0f000) == 0x0080d000
1917 /* ADD SP (register or immediate). */
1918 || (insn & 0x0df0f000) == 0x0040d000
1919 /* SUB SP (register or immediate). */
1920 || (insn & 0x0ffffff0) == 0x01a0d000
1921 /* MOV SP. */
1922 || (insn & 0x0fff0000) == 0x08bd0000
1923 /* POP (LDMIA). */
1924 || (insn & 0x0fff0000) == 0x049d0000)
1925 /* POP of a single register. */
1926 return 1;
1927 }
1928
1929 return 0;
1930 }
1931
1932 /* Implement immediate value decoding, as described in section A5.2.4
1933 (Modified immediate constants in ARM instructions) of the ARM Architecture
1934 Reference Manual (ARMv7-A and ARMv7-R edition). */
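 /* A worked example (illustrative only): the 12-bit encoding 0x4ff has an
 8-bit value of 0xff and a rotate field of 4, so the value is rotated
 right by 8 bits, yielding 0xff000000; 0x0ab has a zero rotate field and
 simply expands to 0xab.  */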
1935
1936 static uint32_t
1937 arm_expand_immediate (uint32_t imm)
1938 {
1939 /* Immediate values are 12 bits long. */
1940 gdb_assert ((imm & 0xfffff000) == 0);
1941
1942 uint32_t unrotated_value = imm & 0xff;
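 /* The 4-bit rotate field encodes a right rotation of twice its value,
 hence the shift by 7 rather than 8 below.  */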
1943 uint32_t rotate_amount = (imm & 0xf00) >> 7;
1944
1945 if (rotate_amount == 0)
1946 return unrotated_value;
1947
1948 return ((unrotated_value >> rotate_amount)
1949 | (unrotated_value << (32 - rotate_amount)));
1950 }
1951
1952 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1953 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1954 fill it in. Return the first address not recognized as a prologue
1955 instruction.
1956
1957 We recognize all the instructions typically found in ARM prologues,
1958 plus harmless instructions which can be skipped (either for analysis
1959 purposes, or a more restrictive set that can be skipped when finding
1960 the end of the prologue). */
1961
1962 static CORE_ADDR
1963 arm_analyze_prologue (struct gdbarch *gdbarch,
1964 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1965 struct arm_prologue_cache *cache,
1966 const arm_instruction_reader &insn_reader)
1967 {
1968 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1969 int regno;
1970 CORE_ADDR offset, current_pc;
1971 pv_t regs[ARM_FPS_REGNUM];
1972 CORE_ADDR unrecognized_pc = 0;
1973 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
1974
1975 /* Search the prologue looking for instructions that set up the
1976 frame pointer, adjust the stack pointer, and save registers.
1977
1978 Be careful, however, and if it doesn't look like a prologue,
1979 don't try to scan it. If, for instance, a frameless function
1980 begins with stmfd sp!, then we will tell ourselves there is
1981 a frame, which will confuse stack traceback, as well as "finish"
1982 and other operations that rely on a knowledge of the stack
1983 traceback. */
1984
1985 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1986 regs[regno] = pv_register (regno, 0);
1987 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1988
1989 for (current_pc = prologue_start;
1990 current_pc < prologue_end;
1991 current_pc += 4)
1992 {
1993 uint32_t insn = insn_reader.read (current_pc, byte_order_for_code);
1994
1995 if (insn == 0xe1a0c00d) /* mov ip, sp */
1996 {
1997 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1998 continue;
1999 }
2000 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
2001 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2002 {
2003 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2004 int rd = bits (insn, 12, 15);
2005 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
2006 continue;
2007 }
2008 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
2009 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2010 {
2011 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2012 int rd = bits (insn, 12, 15);
2013 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
2014 continue;
2015 }
2016 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
2017 [sp, #-4]! */
2018 {
2019 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2020 break;
2021 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
2022 stack.store (regs[ARM_SP_REGNUM], 4,
2023 regs[bits (insn, 12, 15)]);
2024 continue;
2025 }
2026 else if ((insn & 0xffff0000) == 0xe92d0000)
2027 /* stmfd sp!, {..., fp, ip, lr, pc}
2028 or
2029 stmfd sp!, {a1, a2, a3, a4} */
2030 {
2031 int mask = insn & 0xffff;
2032
2033 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2034 break;
2035
2036 /* Calculate offsets of saved registers. */
2037 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
2038 if (mask & (1 << regno))
2039 {
2040 regs[ARM_SP_REGNUM]
2041 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
2042 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
2043 }
2044 }
2045 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
2046 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
2047 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
2048 {
2049 /* No need to add this to saved_regs -- it's just an arg reg. */
2050 continue;
2051 }
2052 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
2053 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
2054 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
2055 {
2056 /* No need to add this to saved_regs -- it's just an arg reg. */
2057 continue;
2058 }
2059 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
2060 { registers } */
2061 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2062 {
2063 /* No need to add this to saved_regs -- it's just arg regs. */
2064 continue;
2065 }
2066 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #n */
2067 {
2068 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2069 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
2070 }
2071 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #n */
2072 {
2073 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2074 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
2075 }
2076 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
2077 [sp, -#c]! */
2078 && tdep->have_fpa_registers)
2079 {
2080 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2081 break;
2082
2083 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
2084 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
2085 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
2086 }
2087 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
2088 [sp!] */
2089 && tdep->have_fpa_registers)
2090 {
2091 int n_saved_fp_regs;
2092 unsigned int fp_start_reg, fp_bound_reg;
2093
2094 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2095 break;
2096
2097 if ((insn & 0x800) == 0x800) /* N0 is set */
2098 {
2099 if ((insn & 0x40000) == 0x40000) /* N1 is set */
2100 n_saved_fp_regs = 3;
2101 else
2102 n_saved_fp_regs = 1;
2103 }
2104 else
2105 {
2106 if ((insn & 0x40000) == 0x40000) /* N1 is set */
2107 n_saved_fp_regs = 2;
2108 else
2109 n_saved_fp_regs = 4;
2110 }
2111
2112 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
2113 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
2114 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
2115 {
2116 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
2117 stack.store (regs[ARM_SP_REGNUM], 12,
2118 regs[fp_start_reg]);
2119 }
2120 }
2121 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
2122 {
2123 /* Allow some special function calls when skipping the
2124 prologue; GCC generates these before storing arguments to
2125 the stack. */
2126 CORE_ADDR dest = BranchDest (current_pc, insn);
2127
2128 if (skip_prologue_function (gdbarch, dest, 0))
2129 continue;
2130 else
2131 break;
2132 }
2133 else if ((insn & 0xf0000000) != 0xe0000000)
2134 break; /* Condition not true, exit early. */
2135 else if (arm_instruction_changes_pc (insn))
2136 /* Don't scan past anything that might change control flow. */
2137 break;
2138 else if (arm_instruction_restores_sp (insn))
2139 {
2140 /* Don't scan past the epilogue. */
2141 break;
2142 }
2143 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
2144 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2145 /* Ignore block loads from the stack, potentially copying
2146 parameters from memory. */
2147 continue;
2148 else if ((insn & 0xfc500000) == 0xe4100000
2149 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2150 /* Similarly ignore single loads from the stack. */
2151 continue;
2152 else if ((insn & 0xffff0ff0) == 0xe1a00000)
2153 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
2154 register instead of the stack. */
2155 continue;
2156 else
2157 {
2158 /* The optimizer might shove anything into the prologue. If we
2159 are building up the cache (cache != NULL) from scanning the
2160 prologue, we just skip what we don't recognize and scan further
2161 to make the cache as complete as possible. However, if we are
2162 skipping the prologue, we stop immediately on the first
2163 unrecognized instruction. */
2164 unrecognized_pc = current_pc;
2165 if (cache != NULL)
2166 continue;
2167 else
2168 break;
2169 }
2170 }
2171
2172 if (unrecognized_pc == 0)
2173 unrecognized_pc = current_pc;
2174
2175 if (cache)
2176 {
2177 int framereg, framesize;
2178
2179 /* The frame size is just the distance from the frame register
2180 to the original stack pointer. */
2181 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
2182 {
2183 /* Frame pointer is fp. */
2184 framereg = ARM_FP_REGNUM;
2185 framesize = -regs[ARM_FP_REGNUM].k;
2186 }
2187 else
2188 {
2189 /* Try the stack pointer... this is a bit desperate. */
2190 framereg = ARM_SP_REGNUM;
2191 framesize = -regs[ARM_SP_REGNUM].k;
2192 }
2193
2194 cache->framereg = framereg;
2195 cache->framesize = framesize;
2196
2197 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
2198 if (stack.find_reg (gdbarch, regno, &offset))
2199 {
2200 cache->saved_regs[regno].set_addr (offset);
2201 if (regno == ARM_SP_REGNUM)
2202 arm_cache_set_active_sp_value (cache, tdep, offset);
2203 }
2204 }
2205
2206 arm_debug_printf ("Prologue scan stopped at %s",
2207 paddress (gdbarch, unrecognized_pc));
2208
2209 return unrecognized_pc;
2210 }
2211
2212 static void
2213 arm_scan_prologue (const frame_info_ptr &this_frame,
2214 struct arm_prologue_cache *cache)
2215 {
2216 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2217 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2218 CORE_ADDR prologue_start, prologue_end;
2219 CORE_ADDR prev_pc = get_frame_pc (this_frame);
2220 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
2221 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
2222
2223 /* Assume there is no frame until proven otherwise. */
2224 cache->framereg = ARM_SP_REGNUM;
2225 cache->framesize = 0;
2226
2227 /* Check for Thumb prologue. */
2228 if (arm_frame_is_thumb (this_frame))
2229 {
2230 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
2231 return;
2232 }
2233
2234 /* Find the function prologue. If we can't find the function in
2235 the symbol table, peek in the stack frame to find the PC. */
2236 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
2237 &prologue_end))
2238 {
2239 /* One way to find the end of the prologue (which works well
2240 for unoptimized code) is to do the following:
2241
2242 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
2243
2244 if (sal.line == 0)
2245 prologue_end = prev_pc;
2246 else if (sal.end < prologue_end)
2247 prologue_end = sal.end;
2248
2249 This mechanism is very accurate so long as the optimizer
2250 doesn't move any instructions from the function body into the
2251 prologue. If this happens, sal.end will be the last
2252 instruction in the first hunk of prologue code just before
2253 the first instruction that the scheduler has moved from
2254 the body to the prologue.
2255
2256 In order to make sure that we scan all of the prologue
2257 instructions, we use a slightly less accurate mechanism which
2258 may scan more than necessary. To help compensate for this
2259 lack of accuracy, the prologue scanning loop below contains
2260 several clauses which will cause the loop to terminate early if
2261 an implausible prologue instruction is encountered.
2262
2263 The expression
2264
2265 prologue_start + 64
2266
2267 is a suitable endpoint since it accounts for the largest
2268 possible prologue plus up to five instructions inserted by
2269 the scheduler. */
2270
2271 if (prologue_end > prologue_start + 64)
2272 {
2273 prologue_end = prologue_start + 64; /* See above. */
2274 }
2275 }
2276 else
2277 {
2278 /* We have no symbol information. Our only option is to assume this
2279 function has a standard stack frame and the normal frame register.
2280 Then, we can find the value of our frame pointer on entrance to
2281 the callee (or at the present moment if this is the innermost frame).
2282 The value stored there should be the address of the stmfd + 8. */
2283 CORE_ADDR frame_loc;
2284 ULONGEST return_value;
2285
2286 /* AAPCS does not use a frame register, so we can abort here. */
2287 if (tdep->arm_abi == ARM_ABI_AAPCS)
2288 return;
2289
2290 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
2291 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
2292 &return_value))
2293 return;
2294 else
2295 {
2296 prologue_start = gdbarch_addr_bits_remove
2297 (gdbarch, return_value) - 8;
2298 prologue_end = prologue_start + 64; /* See above. */
2299 }
2300 }
2301
2302 if (prev_pc < prologue_end)
2303 prologue_end = prev_pc;
2304
2305 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache,
2306 target_arm_instruction_reader ());
2307 }
2308
2309 static struct arm_prologue_cache *
2310 arm_make_prologue_cache (const frame_info_ptr &this_frame)
2311 {
2312 int reg;
2313 struct arm_prologue_cache *cache;
2314 CORE_ADDR unwound_fp, prev_sp;
2315
2316 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2317 arm_cache_init (cache, this_frame);
2318
2319 arm_scan_prologue (this_frame, cache);
2320
2321 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2322 if (unwound_fp == 0)
2323 return cache;
2324
2325 arm_gdbarch_tdep *tdep =
2326 gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
2327
2328 prev_sp = unwound_fp + cache->framesize;
2329 arm_cache_set_active_sp_value (cache, tdep, prev_sp);
2330
2331 /* Calculate actual addresses of saved registers using offsets
2332 determined by arm_scan_prologue. */
2333 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2334 if (cache->saved_regs[reg].is_addr ())
2335 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr () +
2336 prev_sp);
2337
2338 return cache;
2339 }
2340
2341 /* Implementation of the stop_reason hook for arm_prologue frames. */
2342
2343 static enum unwind_stop_reason
2344 arm_prologue_unwind_stop_reason (const frame_info_ptr &this_frame,
2345 void **this_cache)
2346 {
2347 struct arm_prologue_cache *cache;
2348 CORE_ADDR pc;
2349
2350 if (*this_cache == NULL)
2351 *this_cache = arm_make_prologue_cache (this_frame);
2352 cache = (struct arm_prologue_cache *) *this_cache;
2353
2354 /* This is meant to halt the backtrace at "_start". */
2355 pc = get_frame_pc (this_frame);
2356 gdbarch *arch = get_frame_arch (this_frame);
2357 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (arch);
2358 if (pc <= tdep->lowest_pc)
2359 return UNWIND_OUTERMOST;
2360
2361 /* If we've hit a wall, stop. */
2362 if (arm_cache_get_prev_sp_value (cache, tdep) == 0)
2363 return UNWIND_OUTERMOST;
2364
2365 return UNWIND_NO_REASON;
2366 }
2367
2368 /* Our frame ID for a normal frame is the current function's starting PC
2369 and the caller's SP when we were called. */
2370
2371 static void
2372 arm_prologue_this_id (const frame_info_ptr &this_frame,
2373 void **this_cache,
2374 struct frame_id *this_id)
2375 {
2376 struct arm_prologue_cache *cache;
2377 struct frame_id id;
2378 CORE_ADDR pc, func;
2379
2380 if (*this_cache == NULL)
2381 *this_cache = arm_make_prologue_cache (this_frame);
2382 cache = (struct arm_prologue_cache *) *this_cache;
2383
2384 arm_gdbarch_tdep *tdep
2385 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
2386
2387 /* Use function start address as part of the frame ID. If we cannot
2388 identify the start address (due to missing symbol information),
2389 fall back to just using the current PC. */
2390 pc = get_frame_pc (this_frame);
2391 func = get_frame_func (this_frame);
2392 if (!func)
2393 func = pc;
2394
2395 id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), func);
2396 *this_id = id;
2397 }
2398
2399 static struct value *
2400 arm_prologue_prev_register (const frame_info_ptr &this_frame,
2401 void **this_cache,
2402 int prev_regnum)
2403 {
2404 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2405 struct arm_prologue_cache *cache;
2406 CORE_ADDR sp_value;
2407
2408 if (*this_cache == NULL)
2409 *this_cache = arm_make_prologue_cache (this_frame);
2410 cache = (struct arm_prologue_cache *) *this_cache;
2411
2412 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
2413
2414 /* If this frame has signed the return address, mark it as such. */
2415 if (tdep->have_pacbti && cache->ra_signed_state.has_value ()
2416 && *cache->ra_signed_state)
2417 set_frame_previous_pc_masked (this_frame);
2418
2419 /* If we are asked to unwind the PC, then we need to return the LR
2420 instead. The prologue may save PC, but it will point into this
2421 frame's prologue, not the next frame's resume location. Also
2422 strip the saved T bit. A valid LR may have the low bit set, but
2423 a valid PC never does. */
2424 if (prev_regnum == ARM_PC_REGNUM)
2425 {
2426 CORE_ADDR lr;
2427
2428 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2429 return frame_unwind_got_constant (this_frame, prev_regnum,
2430 arm_addr_bits_remove (gdbarch, lr));
2431 }
2432
2433 /* SP is generally not saved to the stack, but this frame is
2434 identified by the next frame's stack pointer at the time of the call.
2435 The value was already reconstructed into PREV_SP. */
2436 if (prev_regnum == ARM_SP_REGNUM)
2437 return frame_unwind_got_constant (this_frame, prev_regnum,
2438 arm_cache_get_prev_sp_value (cache, tdep));
2439
2440 /* The register might be one of the alternative SP registers; if so,
2441 use the value already constructed. */
2442 if (arm_is_alternative_sp_register (tdep, prev_regnum))
2443 {
2444 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
2445 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
2446 }
2447
2448 /* The CPSR may have been changed by the call instruction and by the
2449 called function. The only bit we can reconstruct is the T bit,
2450 by checking the low bit of LR as of the call. This is a reliable
2451 indicator of Thumb-ness except for some ARM v4T pre-interworking
2452 Thumb code, which could get away with a clear low bit as long as
2453 the called function did not use bx. Guess that all other
2454 bits are unchanged; the condition flags are presumably lost,
2455 but the processor status is likely valid. */
2456 if (prev_regnum == ARM_PS_REGNUM)
2457 {
2458 ULONGEST cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2459 CORE_ADDR lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2460
2461 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr);
2462 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2463 }
2464
2465 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2466 prev_regnum);
2467 }
2468
2469 static const frame_unwind_legacy arm_prologue_unwind (
2470 "arm prologue",
2471 NORMAL_FRAME,
2472 FRAME_UNWIND_ARCH,
2473 arm_prologue_unwind_stop_reason,
2474 arm_prologue_this_id,
2475 arm_prologue_prev_register,
2476 NULL,
2477 default_frame_sniffer
2478 );
2479
2480 /* Maintain a list of ARM exception table entries per objfile, similar to the
2481 list of mapping symbols. We only cache entries for standard ARM-defined
2482 personality routines; the cache will contain only the frame unwinding
2483 instructions associated with the entry (not the descriptors). */
2484
2485 struct arm_exidx_entry
2486 {
2487 CORE_ADDR addr;
2488 gdb_byte *entry;
2489
2490 bool operator< (const arm_exidx_entry &other) const
2491 {
2492 return addr < other.addr;
2493 }
2494 };
2495
2496 struct arm_exidx_data
2497 {
2498 std::vector<std::vector<arm_exidx_entry>> section_maps;
2499 };
2500
2501 /* Per-BFD key to store exception handling information. */
2502 static const registry<bfd>::key<arm_exidx_data> arm_exidx_data_key;
2503
2504 static struct obj_section *
2505 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2506 {
2507 for (obj_section *osect : objfile->sections ())
2508 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2509 {
2510 bfd_vma start, size;
2511 start = bfd_section_vma (osect->the_bfd_section);
2512 size = bfd_section_size (osect->the_bfd_section);
2513
2514 if (start <= vma && vma < start + size)
2515 return osect;
2516 }
2517
2518 return NULL;
2519 }
2520
2521 /* Parse contents of exception table and exception index sections
2522 of OBJFILE, and fill in the exception table entry cache.
2523
2524 For each entry that refers to a standard ARM-defined personality
2525 routine, extract the frame unwinding instructions (from either
2526 the index or the table section). The unwinding instructions
2527 are normalized by:
2528 - extracting them from the rest of the table data
2529 - converting to host endianness
2530 - appending the implicit 0xb0 ("Finish") code
2531
2532 The extracted and normalized instructions are stored for later
2533 retrieval by the arm_find_exidx_entry routine. */
2534
2535 static void
2536 arm_exidx_new_objfile (struct objfile *objfile)
2537 {
2538 struct arm_exidx_data *data;
2539 asection *exidx, *extab;
2540 bfd_vma exidx_vma = 0, extab_vma = 0;
2541 LONGEST i;
2542
2543 /* If we've already touched this file, do nothing. */
2544 if (arm_exidx_data_key.get (objfile->obfd.get ()) != nullptr)
2545 return;
2546
2547 /* Read contents of exception table and index. */
2548 exidx = bfd_get_section_by_name (objfile->obfd.get (),
2549 ELF_STRING_ARM_unwind);
2550 gdb::byte_vector exidx_data;
2551 if (exidx)
2552 {
2553 exidx_vma = bfd_section_vma (exidx);
2554 exidx_data.resize (bfd_section_size (exidx));
2555
2556 if (!bfd_get_section_contents (objfile->obfd.get (), exidx,
2557 exidx_data.data (), 0,
2558 exidx_data.size ()))
2559 return;
2560 }
2561
2562 extab = bfd_get_section_by_name (objfile->obfd.get (), ".ARM.extab");
2563 gdb::byte_vector extab_data;
2564 if (extab)
2565 {
2566 extab_vma = bfd_section_vma (extab);
2567 extab_data.resize (bfd_section_size (extab));
2568
2569 if (!bfd_get_section_contents (objfile->obfd.get (), extab,
2570 extab_data.data (), 0,
2571 extab_data.size ()))
2572 return;
2573 }
2574
2575 /* Allocate exception table data structure. */
2576 data = arm_exidx_data_key.emplace (objfile->obfd.get ());
2577 data->section_maps.resize (objfile->obfd->section_count);
2578
2579 /* Fill in exception table. */
2580 for (i = 0; i < exidx_data.size () / 8; i++)
2581 {
2582 struct arm_exidx_entry new_exidx_entry;
2583 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2584 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2585 exidx_data.data () + i * 8 + 4);
2586 bfd_vma addr = 0, word = 0;
2587 int n_bytes = 0, n_words = 0;
2588 struct obj_section *sec;
2589 gdb_byte *entry = NULL;
2590
2591 /* Extract address of start of function. */
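 /* The low 31 bits hold a place-relative (prel31) offset; the XOR/subtract
 pair below sign-extends it to 32 bits before the entry's own address is
 added in.  */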
2592 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2593 idx += exidx_vma + i * 8;
2594
2595 /* Find section containing function and compute section offset. */
2596 sec = arm_obj_section_from_vma (objfile, idx);
2597 if (sec == NULL)
2598 continue;
2599 idx -= bfd_section_vma (sec->the_bfd_section);
2600
2601 /* Determine address of exception table entry. */
2602 if (val == 1)
2603 {
2604 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2605 }
2606 else if ((val & 0xff000000) == 0x80000000)
2607 {
2608 /* Exception table entry embedded in .ARM.exidx
2609 -- must be short form. */
2610 word = val;
2611 n_bytes = 3;
2612 }
2613 else if (!(val & 0x80000000))
2614 {
2615 /* Exception table entry in .ARM.extab. */
2616 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2617 addr += exidx_vma + i * 8 + 4;
2618
2619 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2620 {
2621 word = bfd_h_get_32 (objfile->obfd,
2622 extab_data.data () + addr - extab_vma);
2623 addr += 4;
2624
2625 if ((word & 0xff000000) == 0x80000000)
2626 {
2627 /* Short form. */
2628 n_bytes = 3;
2629 }
2630 else if ((word & 0xff000000) == 0x81000000
2631 || (word & 0xff000000) == 0x82000000)
2632 {
2633 /* Long form. */
2634 n_bytes = 2;
2635 n_words = ((word >> 16) & 0xff);
2636 }
2637 else if (!(word & 0x80000000))
2638 {
2639 bfd_vma pers;
2640 struct obj_section *pers_sec;
2641 int gnu_personality = 0;
2642
2643 /* Custom personality routine. */
2644 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2645 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2646
2647 /* Check whether we've got one of the variants of the
2648 GNU personality routines. */
2649 pers_sec = arm_obj_section_from_vma (objfile, pers);
2650 if (pers_sec)
2651 {
2652 static const char *personality[] =
2653 {
2654 "__gcc_personality_v0",
2655 "__gxx_personality_v0",
2656 "__gcj_personality_v0",
2657 "__gnu_objc_personality_v0",
2658 NULL
2659 };
2660
2661 CORE_ADDR pc = pers + pers_sec->offset ();
2662 int k;
2663
2664 for (k = 0; personality[k]; k++)
2665 if (lookup_minimal_symbol_by_pc_name
2666 (pc, personality[k], objfile))
2667 {
2668 gnu_personality = 1;
2669 break;
2670 }
2671 }
2672
2673 /* If so, the next word contains a word count in the high
2674 byte, followed by the same unwind instructions as the
2675 pre-defined forms. */
2676 if (gnu_personality
2677 && addr + 4 <= extab_vma + extab_data.size ())
2678 {
2679 word = bfd_h_get_32 (objfile->obfd,
2680 (extab_data.data ()
2681 + addr - extab_vma));
2682 addr += 4;
2683 n_bytes = 3;
2684 n_words = ((word >> 24) & 0xff);
2685 }
2686 }
2687 }
2688 }
2689
2690 /* Sanity check address. */
2691 if (n_words)
2692 if (addr < extab_vma
2693 || addr + 4 * n_words > extab_vma + extab_data.size ())
2694 n_words = n_bytes = 0;
2695
2696 /* The unwind instructions reside in WORD (only the N_BYTES least
2697 significant bytes are valid), followed by N_WORDS words in the
2698 extab section starting at ADDR. */
2699 if (n_bytes || n_words)
2700 {
2701 gdb_byte *p = entry
2702 = (gdb_byte *) obstack_alloc (&objfile->per_bfd->storage_obstack,
2703 n_bytes + n_words * 4 + 1);
2704
2705 while (n_bytes--)
2706 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2707
2708 while (n_words--)
2709 {
2710 word = bfd_h_get_32 (objfile->obfd,
2711 extab_data.data () + addr - extab_vma);
2712 addr += 4;
2713
2714 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2715 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2716 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2717 *p++ = (gdb_byte) (word & 0xff);
2718 }
2719
2720 /* Implied "Finish" to terminate the list. */
2721 *p++ = 0xb0;
2722 }
2723
2724 /* Push the entry onto the vector. Entries are guaranteed to
2725 always appear in order of increasing addresses. */
2726 new_exidx_entry.addr = idx;
2727 new_exidx_entry.entry = entry;
2728 data->section_maps[sec->the_bfd_section->index].push_back
2729 (new_exidx_entry);
2730 }
2731 }
2732
2733 /* Search for the exception table entry covering MEMADDR. If one is found,
2734 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2735 set *START to the start of the region covered by this entry. */
2736
2737 static gdb_byte *
2738 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2739 {
2740 struct obj_section *sec;
2741
2742 sec = find_pc_section (memaddr);
2743 if (sec != NULL)
2744 {
2745 struct arm_exidx_data *data;
2746 struct arm_exidx_entry map_key = { memaddr - sec->addr (), 0 };
2747
2748 data = arm_exidx_data_key.get (sec->objfile->obfd.get ());
2749 if (data != NULL)
2750 {
2751 std::vector<arm_exidx_entry> &map
2752 = data->section_maps[sec->the_bfd_section->index];
2753 if (!map.empty ())
2754 {
2755 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2756
2757 /* std::lower_bound finds the earliest ordered insertion
2758 point. If the following symbol starts at this exact
2759 address, we use that; otherwise, the preceding
2760 exception table entry covers this address. */
2761 if (idx < map.end ())
2762 {
2763 if (idx->addr == map_key.addr)
2764 {
2765 if (start)
2766 *start = idx->addr + sec->addr ();
2767 return idx->entry;
2768 }
2769 }
2770
2771 if (idx > map.begin ())
2772 {
2773 idx = idx - 1;
2774 if (start)
2775 *start = idx->addr + sec->addr ();
2776 return idx->entry;
2777 }
2778 }
2779 }
2780 }
2781
2782 return NULL;
2783 }
2784
2785 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2786 instruction list from the ARM exception table entry ENTRY, allocate and
2787 return a prologue cache structure describing how to unwind this frame.
2788
2789 Return NULL if the unwinding instruction list contains a "spare",
2790 "reserved" or "refuse to unwind" instruction as defined in section
2791 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2792 for the ARM Architecture" document. */
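 /* As a minimal illustration (hypothetical entry): the instruction list
 { 0x84, 0x08, 0xb0 } pops r7 and lr (opcode 0x80 with mask 0x408) and then
 finishes, undoing a prologue of the form "push {r7, lr}".  */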
2793
2794 static struct arm_prologue_cache *
2795 arm_exidx_fill_cache (const frame_info_ptr &this_frame, gdb_byte *entry)
2796 {
2797 CORE_ADDR vsp = 0;
2798 int vsp_valid = 0;
2799
2800 struct arm_prologue_cache *cache;
2801 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2802 arm_cache_init (cache, this_frame);
2803
2804 for (;;)
2805 {
2806 gdb_byte insn;
2807
2808 /* Whenever we reload SP, we have to retrieve its actual
2809 value in the current frame. */
2810 if (!vsp_valid)
2811 {
2812 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2813 {
2814 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2815 vsp = get_frame_register_unsigned (this_frame, reg);
2816 }
2817 else
2818 {
2819 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr ();
2820 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2821 }
2822
2823 vsp_valid = 1;
2824 }
2825
2826 /* Decode next unwind instruction. */
2827 insn = *entry++;
2828
2829 if ((insn & 0xc0) == 0)
2830 {
2831 int offset = insn & 0x3f;
2832 vsp += (offset << 2) + 4;
2833 }
2834 else if ((insn & 0xc0) == 0x40)
2835 {
2836 int offset = insn & 0x3f;
2837 vsp -= (offset << 2) + 4;
2838 }
2839 else if ((insn & 0xf0) == 0x80)
2840 {
2841 int mask = ((insn & 0xf) << 8) | *entry++;
2842 int i;
2843
2844 /* The special case of an all-zero mask identifies
2845 "Refuse to unwind". We return NULL to fall back
2846 to the prologue analyzer. */
2847 if (mask == 0)
2848 return NULL;
2849
2850 /* Pop registers r4..r15 under mask. */
2851 for (i = 0; i < 12; i++)
2852 if (mask & (1 << i))
2853 {
2854 cache->saved_regs[4 + i].set_addr (vsp);
2855 vsp += 4;
2856 }
2857
2858 /* Special-case popping SP -- we need to reload vsp. */
2859 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2860 vsp_valid = 0;
2861 }
2862 else if ((insn & 0xf0) == 0x90)
2863 {
2864 int reg = insn & 0xf;
2865
2866 /* Reserved cases. */
2867 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2868 return NULL;
2869
2870 /* Set SP from another register and mark VSP for reload. */
2871 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2872 vsp_valid = 0;
2873 }
2874 else if ((insn & 0xf0) == 0xa0)
2875 {
2876 int count = insn & 0x7;
2877 int pop_lr = (insn & 0x8) != 0;
2878 int i;
2879
2880 /* Pop r4..r[4+count]. */
2881 for (i = 0; i <= count; i++)
2882 {
2883 cache->saved_regs[4 + i].set_addr (vsp);
2884 vsp += 4;
2885 }
2886
2887 /* If indicated by flag, pop LR as well. */
2888 if (pop_lr)
2889 {
2890 cache->saved_regs[ARM_LR_REGNUM].set_addr (vsp);
2891 vsp += 4;
2892 }
2893 }
2894 else if (insn == 0xb0)
2895 {
2896 /* We could only have updated PC by popping into it; if so, it
2897 will show up as an address. Otherwise, copy LR into PC. */
2898 if (!cache->saved_regs[ARM_PC_REGNUM].is_addr ())
2899 cache->saved_regs[ARM_PC_REGNUM]
2900 = cache->saved_regs[ARM_LR_REGNUM];
2901
2902 /* We're done. */
2903 break;
2904 }
2905 else if (insn == 0xb1)
2906 {
2907 int mask = *entry++;
2908 int i;
2909
2910 /* An all-zero mask or a mask >= 16 is "spare". */
2911 if (mask == 0 || mask >= 16)
2912 return NULL;
2913
2914 /* Pop r0..r3 under mask. */
2915 for (i = 0; i < 4; i++)
2916 if (mask & (1 << i))
2917 {
2918 cache->saved_regs[i].set_addr (vsp);
2919 vsp += 4;
2920 }
2921 }
2922 else if (insn == 0xb2)
2923 {
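 /* Opcode 0xb2: "vsp = vsp + 0x204 + (uleb128 << 2)"; the operand
 decoded below is a ULEB128-encoded value.  */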
2924 ULONGEST offset = 0;
2925 unsigned shift = 0;
2926
2927 do
2928 {
2929 offset |= (*entry & 0x7f) << shift;
2930 shift += 7;
2931 }
2932 while (*entry++ & 0x80);
2933
2934 vsp += 0x204 + (offset << 2);
2935 }
2936 else if (insn == 0xb3)
2937 {
2938 int start = *entry >> 4;
2939 int count = (*entry++) & 0xf;
2940 int i;
2941
2942 /* Only registers D0..D15 are valid here. */
2943 if (start + count >= 16)
2944 return NULL;
2945
2946 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2947 for (i = 0; i <= count; i++)
2948 {
2949 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2950 vsp += 8;
2951 }
2952
2953 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2954 vsp += 4;
2955 }
2956 else if ((insn & 0xf8) == 0xb8)
2957 {
2958 int count = insn & 0x7;
2959 int i;
2960
2961 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2962 for (i = 0; i <= count; i++)
2963 {
2964 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2965 vsp += 8;
2966 }
2967
2968 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2969 vsp += 4;
2970 }
2971 else if (insn == 0xc6)
2972 {
2973 int start = *entry >> 4;
2974 int count = (*entry++) & 0xf;
2975 int i;
2976
2977 /* Only registers WR0..WR15 are valid. */
2978 if (start + count >= 16)
2979 return NULL;
2980
2981 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2982 for (i = 0; i <= count; i++)
2983 {
2984 cache->saved_regs[ARM_WR0_REGNUM + start + i].set_addr (vsp);
2985 vsp += 8;
2986 }
2987 }
2988 else if (insn == 0xc7)
2989 {
2990 int mask = *entry++;
2991 int i;
2992
2993 /* An all-zero mask or a mask >= 16 is "spare". */
2994 if (mask == 0 || mask >= 16)
2995 return NULL;
2996
2997 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2998 for (i = 0; i < 4; i++)
2999 if (mask & (1 << i))
3000 {
3001 cache->saved_regs[ARM_WCGR0_REGNUM + i].set_addr (vsp);
3002 vsp += 4;
3003 }
3004 }
3005 else if ((insn & 0xf8) == 0xc0)
3006 {
3007 int count = insn & 0x7;
3008 int i;
3009
3010 /* Pop iwmmx registers WR[10]..WR[10+count]. */
3011 for (i = 0; i <= count; i++)
3012 {
3013 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].set_addr (vsp);
3014 vsp += 8;
3015 }
3016 }
3017 else if (insn == 0xc8)
3018 {
3019 int start = *entry >> 4;
3020 int count = (*entry++) & 0xf;
3021 int i;
3022
3023 /* Only registers D0..D31 are valid. */
3024 if (start + count >= 16)
3025 return NULL;
3026
3027 /* Pop VFP double-precision registers
3028 D[16+start]..D[16+start+count]. */
3029 for (i = 0; i <= count; i++)
3030 {
3031 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].set_addr (vsp);
3032 vsp += 8;
3033 }
3034 }
3035 else if (insn == 0xc9)
3036 {
3037 int start = *entry >> 4;
3038 int count = (*entry++) & 0xf;
3039 int i;
3040
3041 /* Pop VFP double-precision registers D[start]..D[start+count]. */
3042 for (i = 0; i <= count; i++)
3043 {
3044 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
3045 vsp += 8;
3046 }
3047 }
3048 else if ((insn & 0xf8) == 0xd0)
3049 {
3050 int count = insn & 0x7;
3051 int i;
3052
3053 /* Pop VFP double-precision registers D[8]..D[8+count]. */
3054 for (i = 0; i <= count; i++)
3055 {
3056 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
3057 vsp += 8;
3058 }
3059 }
3060 else
3061 {
3062 /* Everything else is "spare". */
3063 return NULL;
3064 }
3065 }
3066
3067 /* If we restore SP from a register, assume this was the frame register.
3068 Otherwise just fall back to SP as frame register. */
3069 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
3070 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
3071 else
3072 cache->framereg = ARM_SP_REGNUM;
3073
3074 /* Determine offset to previous frame. */
3075 cache->framesize
3076 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
3077
3078 /* We already got the previous SP. */
3079 arm_gdbarch_tdep *tdep
3080 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3081 arm_cache_set_active_sp_value (cache, tdep, vsp);
3082
3083 return cache;
3084 }
3085
3086 /* Unwinding via ARM exception table entries. Note that the sniffer
3087 already computes a filled-in prologue cache, which is then used
3088 with the same arm_prologue_this_id and arm_prologue_prev_register
3089 routines also used for prologue-parsing based unwinding. */
3090
3091 static int
3092 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
3093 const frame_info_ptr &this_frame,
3094 void **this_prologue_cache)
3095 {
3096 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3097 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3098 CORE_ADDR addr_in_block, exidx_region, func_start;
3099 struct arm_prologue_cache *cache;
3100 gdb_byte *entry;
3101
3102 /* See if we have an ARM exception table entry covering this address. */
3103 addr_in_block = get_frame_address_in_block (this_frame);
3104 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
3105 if (!entry)
3106 return 0;
3107
3108 /* The ARM exception table does not describe unwind information
3109 for arbitrary PC values, but is guaranteed to be correct only
3110 at call sites. We have to decide here whether we want to use
3111 ARM exception table information for this frame, or fall back
3112 to using prologue parsing. (Note that if we have DWARF CFI,
3113 this sniffer isn't even called -- CFI is always preferred.)
3114
3115 Before we make this decision, however, we check whether we
3116 actually have *symbol* information for the current frame.
3117 If not, prologue parsing would not work anyway, so we might
3118 as well use the exception table and hope for the best. */
3119 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
3120 {
3121 int exc_valid = 0;
3122
3123 /* If the next frame is "normal", we are at a call site in this
3124 frame, so exception information is guaranteed to be valid. */
3125 if (get_next_frame (this_frame)
3126 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
3127 exc_valid = 1;
3128
3129 /* Some syscalls keep PC pointing to the SVC instruction itself. */
3130 for (int shift = 0; shift <= 1 && !exc_valid; ++shift)
3131 {
3132 /* We also assume exception information is valid if we're currently
3133 blocked in a system call. The system library is supposed to
3134 ensure this, so that e.g. pthread cancellation works. */
3135 if (arm_frame_is_thumb (this_frame))
3136 {
3137 ULONGEST insn;
3138
3139 if (safe_read_memory_unsigned_integer ((get_frame_pc (this_frame)
3140 - (shift ? 2 : 0)),
3141 2, byte_order_for_code,
3142 &insn)
3143 && (insn & 0xff00) == 0xdf00 /* svc */)
3144 exc_valid = 1;
3145 }
3146 else
3147 {
3148 ULONGEST insn;
3149
3150 if (safe_read_memory_unsigned_integer ((get_frame_pc (this_frame)
3151 - (shift ? 4 : 0)),
3152 4, byte_order_for_code,
3153 &insn)
3154 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
3155 exc_valid = 1;
3156 }
3157 }
3158
3159 /* Bail out if we don't know that exception information is valid. */
3160 if (!exc_valid)
3161 return 0;
3162
3163 /* The ARM exception index does not mark the *end* of the region
3164 covered by the entry, and some functions will not have any entry.
3165 To correctly recognize the end of the covered region, the linker
3166 should have inserted dummy records with a CANTUNWIND marker.
3167
3168 Unfortunately, current versions of GNU ld do not reliably do
3169 this, and thus we may have found an incorrect entry above.
3170 As a (temporary) sanity check, we only use the entry if it
3171 lies *within* the bounds of the function. Note that this check
3172 might reject perfectly valid entries that just happen to cover
3173 multiple functions; therefore this check ought to be removed
3174 once the linker is fixed. */
3175 if (func_start > exidx_region)
3176 return 0;
3177 }
3178
3179 /* Decode the list of unwinding instructions into a prologue cache.
3180 Note that this may fail due to e.g. a "refuse to unwind" code. */
3181 cache = arm_exidx_fill_cache (this_frame, entry);
3182 if (!cache)
3183 return 0;
3184
3185 *this_prologue_cache = cache;
3186 return 1;
3187 }
3188
3189 struct frame_unwind_legacy arm_exidx_unwind (
3190 "arm exidx",
3191 NORMAL_FRAME,
3192 FRAME_UNWIND_ARCH,
3193 default_frame_unwind_stop_reason,
3194 arm_prologue_this_id,
3195 arm_prologue_prev_register,
3196 NULL,
3197 arm_exidx_unwind_sniffer
3198 );
3199
3200 static struct arm_prologue_cache *
3201 arm_make_epilogue_frame_cache (const frame_info_ptr &this_frame)
3202 {
3203 struct arm_prologue_cache *cache;
3204 int reg;
3205
3206 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3207 arm_cache_init (cache, this_frame);
3208
3209 /* Still rely on the offset calculated from prologue. */
3210 arm_scan_prologue (this_frame, cache);
3211
3212 /* Since we are in epilogue, the SP has been restored. */
3213 arm_gdbarch_tdep *tdep
3214 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3215 arm_cache_set_active_sp_value (cache, tdep,
3216 get_frame_register_unsigned (this_frame,
3217 ARM_SP_REGNUM));
3218
3219 /* Calculate actual addresses of saved registers using offsets
3220 determined by arm_scan_prologue. */
3221 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
3222 if (cache->saved_regs[reg].is_addr ())
3223 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
3224 + arm_cache_get_prev_sp_value (cache, tdep));
3225
3226 return cache;
3227 }
3228
3229 /* Implementation of function hook 'this_id' in
3230 'struct frame_unwind' for the epilogue unwinder. */
3231
3232 static void
3233 arm_epilogue_frame_this_id (const frame_info_ptr &this_frame,
3234 void **this_cache,
3235 struct frame_id *this_id)
3236 {
3237 struct arm_prologue_cache *cache;
3238 CORE_ADDR pc, func;
3239
3240 if (*this_cache == NULL)
3241 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3242 cache = (struct arm_prologue_cache *) *this_cache;
3243
3244 /* Use function start address as part of the frame ID. If we cannot
3245 identify the start address (due to missing symbol information),
3246 fall back to just using the current PC. */
3247 pc = get_frame_pc (this_frame);
3248 func = get_frame_func (this_frame);
3249 if (func == 0)
3250 func = pc;
3251
3252 arm_gdbarch_tdep *tdep
3253 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3254 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), func);
3255 }
3256
3257 /* Implementation of function hook 'prev_register' in
3258 'struct frame_unwind' for the epilogue unwinder. */
3259
3260 static struct value *
3261 arm_epilogue_frame_prev_register (const frame_info_ptr &this_frame,
3262 void **this_cache, int regnum)
3263 {
3264 if (*this_cache == NULL)
3265 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3266
3267 return arm_prologue_prev_register (this_frame, this_cache, regnum);
3268 }
3269
3270 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
3271 CORE_ADDR pc);
3272 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
3273 CORE_ADDR pc);
3274
3275 /* Implementation of function hook 'sniffer' in
3276 'struct frame_unwind' for the epilogue unwinder. */
3277
3278 static int
3279 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
3280 const frame_info_ptr &this_frame,
3281 void **this_prologue_cache)
3282 {
3283 if (frame_relative_level (this_frame) == 0)
3284 {
3285 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3286 CORE_ADDR pc = get_frame_pc (this_frame);
3287
3288 if (arm_frame_is_thumb (this_frame))
3289 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3290 else
3291 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3292 }
3293 else
3294 return 0;
3295 }
3296
3297 /* Frame unwinder from epilogue. */
3298
3299 static const struct frame_unwind_legacy arm_epilogue_frame_unwind (
3300 "arm epilogue",
3301 NORMAL_FRAME,
3302 FRAME_UNWIND_ARCH,
3303 default_frame_unwind_stop_reason,
3304 arm_epilogue_frame_this_id,
3305 arm_epilogue_frame_prev_register,
3306 NULL,
3307 arm_epilogue_frame_sniffer
3308 );
3309
3310 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
3311 trampoline, return the target PC. Otherwise return 0.
3312
3313 void call0a (char c, short s, int i, long l) {}
3314
3315 int main (void)
3316 {
3317 (*pointer_to_call0a) (c, s, i, l);
3318 }
3319
3320 Instead of calling a stub library function _call_via_xx (xx is
3321 the register name), GCC may inline the trampoline in the object
3322 file as below (register r2 has the address of call0a).
3323
3324 .global main
3325 .type main, %function
3326 ...
3327 bl .L1
3328 ...
3329 .size main, .-main
3330
3331 .L1:
3332 bx r2
3333
3334 The trampoline 'bx r2' doesn't belong to main. */
3335
3336 static CORE_ADDR
3337 arm_skip_bx_reg (const frame_info_ptr &frame, CORE_ADDR pc)
3338 {
3339 /* The heuristic for recognizing such a trampoline is that FRAME is
3340 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
3341 if (arm_frame_is_thumb (frame))
3342 {
3343 gdb_byte buf[2];
3344
3345 if (target_read_memory (pc, buf, 2) == 0)
3346 {
3347 struct gdbarch *gdbarch = get_frame_arch (frame);
3348 enum bfd_endian byte_order_for_code
3349 = gdbarch_byte_order_for_code (gdbarch);
3350 uint16_t insn
3351 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3352
3353 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3354 {
3355 CORE_ADDR dest
3356 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
3357
3358 /* Clear the LSB so that gdb core sets step-resume
3359 breakpoint at the right address. */
3360 return UNMAKE_THUMB_ADDR (dest);
3361 }
3362 }
3363 }
3364
3365 return 0;
3366 }
3367
3368 static struct arm_prologue_cache *
3369 arm_make_stub_cache (const frame_info_ptr &this_frame)
3370 {
3371 struct arm_prologue_cache *cache;
3372
3373 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3374 arm_cache_init (cache, this_frame);
3375
3376 arm_gdbarch_tdep *tdep
3377 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3378 arm_cache_set_active_sp_value (cache, tdep,
3379 get_frame_register_unsigned (this_frame,
3380 ARM_SP_REGNUM));
3381
3382 return cache;
3383 }
3384
3385 /* Our frame ID for a stub frame is the current SP and LR. */
3386
3387 static void
3388 arm_stub_this_id (const frame_info_ptr &this_frame,
3389 void **this_cache,
3390 struct frame_id *this_id)
3391 {
3392 struct arm_prologue_cache *cache;
3393
3394 if (*this_cache == NULL)
3395 *this_cache = arm_make_stub_cache (this_frame);
3396 cache = (struct arm_prologue_cache *) *this_cache;
3397
3398 arm_gdbarch_tdep *tdep
3399 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3400 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3401 get_frame_pc (this_frame));
3402 }
3403
3404 static int
3405 arm_stub_unwind_sniffer (const struct frame_unwind *self,
3406 const frame_info_ptr &this_frame,
3407 void **this_prologue_cache)
3408 {
3409 CORE_ADDR addr_in_block;
3410 gdb_byte dummy[4];
3411 CORE_ADDR pc, start_addr;
3412 const char *name;
3413
3414 addr_in_block = get_frame_address_in_block (this_frame);
3415 pc = get_frame_pc (this_frame);
3416 if (in_plt_section (addr_in_block)
3417 /* We also use the stub unwinder if the target memory is unreadable,
3418 to avoid having the prologue unwinder try to read it. */
3419 || target_read_memory (pc, dummy, 4) != 0)
3420 return 1;
3421
3422 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
3423 && arm_skip_bx_reg (this_frame, pc) != 0)
3424 return 1;
3425
3426 return 0;
3427 }
3428
3429 struct frame_unwind_legacy arm_stub_unwind (
3430 "arm stub",
3431 NORMAL_FRAME,
3432 FRAME_UNWIND_ARCH,
3433 default_frame_unwind_stop_reason,
3434 arm_stub_this_id,
3435 arm_prologue_prev_register,
3436 NULL,
3437 arm_stub_unwind_sniffer
3438 );
3439
3440 /* Put here the code to store, into CACHE->saved_regs, the addresses
3441 of the saved registers of frame described by THIS_FRAME. CACHE is
3442 returned. */
3443
3444 static struct arm_prologue_cache *
3445 arm_m_exception_cache (const frame_info_ptr &this_frame)
3446 {
3447 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3448 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
3449 struct arm_prologue_cache *cache;
3450
3451 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3452 arm_cache_init (cache, this_frame);
3453
3454 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
3455 describes which bits in LR define which stack was used prior to the
3456 exception and whether the FPU was in use (causing an extended stack frame). */
3457
3458 /* In the lockup state PC contains a lockup magic value.
3459 The PC value of the next outer frame is irreversibly
3460 lost. The other registers are intact, so LR likely contains
3461 the PC of some frame next to the outer one, but we cannot analyze
3462 the next outer frame without knowing its PC;
3463 therefore we do not know the SP fixup for this frame.
3464 Some heuristics to resynchronize SP might be possible.
3465 For simplicity, just terminate the unwinding to prevent it going
3466 astray and attempting to read data/addresses it shouldn't,
3467 which may cause further issues due to side effects. */
3468 CORE_ADDR pc = get_frame_pc (this_frame);
3469 if (arm_m_addr_is_lockup (pc))
3470 {
3471 /* The lockup can only be real in the innermost frame,
3472 as the CPU is stopped and cannot create more frames.
3473 If we hit the lockup magic PC in an outer frame, it is
3474 just a sentinel at the top of the stack: do not warn then. */
3475 if (frame_relative_level (this_frame) == 0)
3476 warning (_("ARM M in lockup state, stack unwinding terminated."));
3477
3478 /* Terminate any further stack unwinding. */
3479 arm_cache_set_active_sp_value (cache, tdep, 0);
3480 return cache;
3481 }
3482
3483 CORE_ADDR lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
3484
3485 /* ARMv7-M Architecture Reference "A2.3.1 Arm core registers"
3486 states that LR is set to 0xffffffff on reset. ARMv8-M Architecture
3487 Reference "B3.3 Registers" states that LR is set to 0xffffffff on warm
3488 reset if Main Extension is implemented, otherwise the value is unknown. */
3489 if (lr == 0xffffffff)
3490 {
3491 /* Terminate any further stack unwinding. */
3492 arm_cache_set_active_sp_value (cache, tdep, 0);
3493 return cache;
3494 }
3495
3496 /* Check FNC_RETURN indicator bits (24-31). */
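 /* FNC_RETURN values have the form 0xfexxxxxx (typically 0xfefffffe or
 0xfeffffff) and mark a return from a non-secure function call back to
 its secure caller.  */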
3497 bool fnc_return = (((lr >> 24) & 0xff) == 0xfe);
3498 if (fnc_return)
3499 {
3500 /* FNC_RETURN is only valid for targets with Security Extension. */
3501 if (!tdep->have_sec_ext)
3502 {
3503 error (_("While unwinding an exception frame, found unexpected Link "
3504 "Register value %s that requires the security extension, "
3505 "but the extension was not found or is disabled. This "
3506 "should not happen and may be caused by corrupt data or a "
3507 "bug in GDB."), phex (lr, ARM_INT_REGISTER_SIZE));
3508 }
3509
3510 if (!arm_unwind_secure_frames)
3511 {
3512 warning (_("Non-secure to secure stack unwinding disabled."));
3513
3514 /* Terminate any further stack unwinding. */
3515 arm_cache_set_active_sp_value (cache, tdep, 0);
3516 return cache;
3517 }
3518
3519 ULONGEST xpsr = get_frame_register_unsigned (this_frame, ARM_PS_REGNUM);
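 /* Bits 0-8 of xPSR hold the exception number (IPSR); a non-zero value
 means execution was in Handler mode.  */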
3520 if ((xpsr & 0x1ff) != 0)
3521 /* Handler mode: This is the mode that exceptions are handled in. */
3522 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_s_regnum);
3523 else
3524 /* Thread mode: This is the normal mode that programs run in. */
3525 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_s_regnum);
3526
3527 CORE_ADDR unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3528
3529 /* Stack layout for a function call from Secure to Non-Secure state
3530 (ARMv8-M section B3.16):
3531
3532 SP Offset
3533
3534 +-------------------+
3535 0x08 | |
3536 +-------------------+ <-- Original SP
3537 0x04 | Partial xPSR |
3538 +-------------------+
3539 0x00 | Return Address |
3540 +===================+ <-- New SP */
3541
3542 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + 0x00);
3543 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + 0x00);
3544 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + 0x04);
3545
3546 arm_cache_set_active_sp_value (cache, tdep, unwound_sp + 0x08);
3547
3548 return cache;
3549 }
3550
3551 /* Check EXC_RETURN indicator bits (24-31). */
3552 bool exc_return = (((lr >> 24) & 0xff) == 0xff);
3553 if (exc_return)
3554 {
3555 int sp_regnum;
3556 bool secure_stack_used = false;
3557 bool default_callee_register_stacking = false;
3558 bool exception_domain_is_secure = false;
3559 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3560
3561 /* Check the EXC_RETURN SPSEL bit to see whether the Main or the Thread (process) stack was used. */
3562 bool process_stack_used = (bit (lr, 2) != 0);
3563
3564 if (tdep->have_sec_ext)
3565 {
3566 secure_stack_used = (bit (lr, 6) != 0);
3567 default_callee_register_stacking = (bit (lr, 5) != 0);
3568 exception_domain_is_secure = (bit (lr, 0) != 0);
3569
3570 /* Unwinding from non-secure to secure can trip security
3571 measures. In order to avoid the debugger being
3572 intrusive, rely on the user to configure the requested
3573 mode. */
3574 if (secure_stack_used && !exception_domain_is_secure
3575 && !arm_unwind_secure_frames)
3576 {
3577 warning (_("Non-secure to secure stack unwinding disabled."));
3578
3579 /* Terminate any further stack unwinding. */
3580 arm_cache_set_active_sp_value (cache, tdep, 0);
3581 return cache;
3582 }
3583
3584 if (process_stack_used)
3585 {
3586 if (secure_stack_used)
3587 /* Secure thread (process) stack used, use PSP_S as SP. */
3588 sp_regnum = tdep->m_profile_psp_s_regnum;
3589 else
3590 /* Non-secure thread (process) stack used, use PSP_NS as SP. */
3591 sp_regnum = tdep->m_profile_psp_ns_regnum;
3592 }
3593 else
3594 {
3595 if (secure_stack_used)
3596 /* Secure main stack used, use MSP_S as SP. */
3597 sp_regnum = tdep->m_profile_msp_s_regnum;
3598 else
3599 /* Non-secure main stack used, use MSP_NS as SP. */
3600 sp_regnum = tdep->m_profile_msp_ns_regnum;
3601 }
3602 }
3603 else
3604 {
3605 if (process_stack_used)
3606 /* Thread (process) stack used, use PSP as SP. */
3607 sp_regnum = tdep->m_profile_psp_regnum;
3608 else
3609 /* Main stack used, use MSP as SP. */
3610 sp_regnum = tdep->m_profile_msp_regnum;
3611 }
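/* A worked example, assuming a target without the Security extension and
   LR == 0xfffffffd (a common EXC_RETURN value): bit 2 (SPSEL) is set, so
   the process stack was in use and PSP has just been selected above, and
   bit 4 (FTYPE) is set, so the standard (non-FP) 0x20-byte frame layout is
   used further below. */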
3612
3613 /* Set the active SP regnum. */
3614 arm_cache_switch_prev_sp (cache, tdep, sp_regnum);
3615
3616 /* Fetch the SP to use for this frame. */
3617 CORE_ADDR unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3618
3619 /* Exception entry context stacking is described in the ARMv8-M (section
3620 B3.19) and ARMv7-M (sections B1.5.6 and B1.5.7) Architecture Reference
3621 Manuals.
3622
3623 The following figure shows the structure of the stack frame when
3624 Security and Floating-point extensions are present.
3625
3626 SP Offsets
3627 Without With
3628 Callee Regs Callee Regs
3629 (Secure -> Non-Secure)
3630 +-------------------+
3631 0xA8 | | 0xD0
3632 +===================+ --+ <-- Original SP
3633 0xA4 | S31 | 0xCC |
3634 +-------------------+ |
3635 ... | Additional FP context
3636 +-------------------+ |
3637 0x68 | S16 | 0x90 |
3638 +===================+ --+
3639 0x64 | Reserved | 0x8C |
3640 +-------------------+ |
3641 0x60 | FPSCR | 0x88 |
3642 +-------------------+ |
3643 0x5C | S15 | 0x84 | FP context
3644 +-------------------+ |
3645 ... |
3646 +-------------------+ |
3647 0x20 | S0 | 0x48 |
3648 +===================+ --+
3649 0x1C | xPSR | 0x44 |
3650 +-------------------+ |
3651 0x18 | Return address | 0x40 |
3652 +-------------------+ |
3653 0x14 | LR(R14) | 0x3C |
3654 +-------------------+ |
3655 0x10 | R12 | 0x38 | State context
3656 +-------------------+ |
3657 0x0C | R3 | 0x34 |
3658 +-------------------+ |
3659 ... |
3660 +-------------------+ |
3661 0x00 | R0 | 0x28 |
3662 +===================+ --+
3663 | R11 | 0x24 |
3664 +-------------------+ |
3665 ... |
3666 +-------------------+ | Additional state
3667 | R4 | 0x08 | context when
3668 +-------------------+ | transitioning from
3669 | Reserved | 0x04 | Secure to Non-Secure
3670 +-------------------+ |
3671 | Magic signature | 0x00 |
3672 +===================+ --+ <-- New SP */
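/* A worked example (with an assumed, purely illustrative SP value): with
   UNWOUND_SP == 0x20000fd0 and no additional state context stacked
   (SP_R0_OFFSET stays 0 below), R0 is read from 0x20000fd0, the return
   address from 0x20000fe8 and the xPSR from 0x20000fec. */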
3673
3674 uint32_t sp_r0_offset = 0;
3675
3676 /* With the Security extension, the hardware saves R4..R11 too. */
3677 if (tdep->have_sec_ext && secure_stack_used
3678 && (!default_callee_register_stacking || !exception_domain_is_secure))
3679 {
3680 /* Record the stack locations of the callee-saved registers R4..R11. */
3681 cache->saved_regs[4].set_addr (unwound_sp + 0x08);
3682 cache->saved_regs[5].set_addr (unwound_sp + 0x0C);
3683 cache->saved_regs[6].set_addr (unwound_sp + 0x10);
3684 cache->saved_regs[7].set_addr (unwound_sp + 0x14);
3685 cache->saved_regs[8].set_addr (unwound_sp + 0x18);
3686 cache->saved_regs[9].set_addr (unwound_sp + 0x1C);
3687 cache->saved_regs[10].set_addr (unwound_sp + 0x20);
3688 cache->saved_regs[11].set_addr (unwound_sp + 0x24);
3689 sp_r0_offset = 0x28;
3690 }
3691
3692 /* The hardware saves eight 32-bit words, comprising xPSR,
3693 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3694 "B1.5.6 Exception entry behavior" in
3695 "ARMv7-M Architecture Reference Manual". */
3696 cache->saved_regs[0].set_addr (unwound_sp + sp_r0_offset);
3697 cache->saved_regs[1].set_addr (unwound_sp + sp_r0_offset + 0x04);
3698 cache->saved_regs[2].set_addr (unwound_sp + sp_r0_offset + 0x08);
3699 cache->saved_regs[3].set_addr (unwound_sp + sp_r0_offset + 0x0C);
3700 cache->saved_regs[ARM_IP_REGNUM].set_addr (unwound_sp + sp_r0_offset
3701 + 0x10);
3702 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + sp_r0_offset
3703 + 0x14);
3704 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + sp_r0_offset
3705 + 0x18);
3706 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + sp_r0_offset
3707 + 0x1C);
3708
3709 /* Check the EXC_RETURN FTYPE bit to see whether the extended stack frame
3710 type (FPU registers stored) was used. */
3711 bool extended_frame_used = (bit (lr, 4) == 0);
3712 if (extended_frame_used)
3713 {
3714 ULONGEST fpccr;
3715 ULONGEST fpcar;
3716
3717 /* Read FPCCR register. */
3718 if (!safe_read_memory_unsigned_integer (FPCCR, ARM_INT_REGISTER_SIZE,
3719 byte_order, &fpccr))
3720 {
3721 warning (_("Could not fetch required FPCCR content. Further "
3722 "unwinding is impossible."));
3723 arm_cache_set_active_sp_value (cache, tdep, 0);
3724 return cache;
3725 }
3726
3727 /* Read FPCAR register. */
3728 if (!safe_read_memory_unsigned_integer (FPCAR, ARM_INT_REGISTER_SIZE,
3729 byte_order, &fpcar))
3730 {
3731 warning (_("Could not fetch FPCAR content. Further unwinding of "
3732 "FP register values will be unreliable."));
3733 fpcar = 0;
3734 }
3735
3736 bool fpccr_aspen = bit (fpccr, 31);
3737 bool fpccr_lspen = bit (fpccr, 30);
3738 bool fpccr_ts = bit (fpccr, 26);
3739 bool fpccr_lspact = bit (fpccr, 0);
3740
3741 /* The LSPEN and ASPEN bits indicate whether lazy state preservation
3742 for the FP registers is enabled or disabled. The LSPACT bit indicates,
3743 together with FPCAR, whether the lazy state preservation feature is
3744 active for the current frame or for another frame.
3745 For details, see "Lazy context save of FP state" in B1.5.7, and
3746 ARM AN298, which describes lazy stacking on the Cortex-M4F. */
3747 bool fpcar_points_to_this_frame = ((unwound_sp + sp_r0_offset + 0x20)
3748 == (fpcar & ~0x7));
3749 bool read_fp_regs_from_stack = (!(fpccr_aspen && fpccr_lspen
3750 && fpccr_lspact
3751 && fpcar_points_to_this_frame));
3752
3753 /* Extended stack frame type used. */
3754 if (read_fp_regs_from_stack)
3755 {
3756 CORE_ADDR addr = unwound_sp + sp_r0_offset + 0x20;
3757 for (int i = 0; i < 8; i++)
3758 {
3759 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (addr);
3760 addr += 8;
3761 }
3762 }
3763 cache->saved_regs[ARM_FPSCR_REGNUM].set_addr (unwound_sp
3764 + sp_r0_offset + 0x60);
3765
3766 if (tdep->have_sec_ext && !default_callee_register_stacking
3767 && fpccr_ts)
3768 {
3769 /* Handle floating-point callee saved registers. */
3770 if (read_fp_regs_from_stack)
3771 {
3772 CORE_ADDR addr = unwound_sp + sp_r0_offset + 0x68;
3773 for (int i = 8; i < 16; i++)
3774 {
3775 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (addr);
3776 addr += 8;
3777 }
3778 }
3779
3780 arm_cache_set_active_sp_value (cache, tdep,
3781 unwound_sp + sp_r0_offset + 0xA8);
3782 }
3783 else
3784 {
3785 /* Offset 0x64 is reserved. */
3786 arm_cache_set_active_sp_value (cache, tdep,
3787 unwound_sp + sp_r0_offset + 0x68);
3788 }
3789 }
3790 else
3791 {
3792 /* Standard stack frame type used. */
3793 arm_cache_set_active_sp_value (cache, tdep,
3794 unwound_sp + sp_r0_offset + 0x20);
3795 }
3796
3797 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3798 aligner between the top of the stacked exception frame and the
3799 previous context's stack pointer. */
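/* For illustration: if the pre-exception SP was only 4-byte aligned (say
   0x20000ffc), exception entry inserted a 4-byte aligner, set bit 9 in the
   stacked xPSR, and the caller's SP recovered here is the frame top plus
   4. */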
3800 ULONGEST xpsr;
3801 if (!safe_read_memory_unsigned_integer (cache->saved_regs[ARM_PS_REGNUM]
3802 .addr (), ARM_INT_REGISTER_SIZE,
3803 byte_order, &xpsr))
3804 {
3805 warning (_("Could not fetch required XPSR content. Further "
3806 "unwinding is impossible."));
3807 arm_cache_set_active_sp_value (cache, tdep, 0);
3808 return cache;
3809 }
3810
3811 if (bit (xpsr, 9) != 0)
3812 {
3813 CORE_ADDR new_sp = arm_cache_get_prev_sp_value (cache, tdep) + 4;
3814 arm_cache_set_active_sp_value (cache, tdep, new_sp);
3815 }
3816
3817 return cache;
3818 }
3819
3820 internal_error (_("While unwinding an exception frame, "
3821 "found unexpected Link Register value "
3822 "%s. This should not happen and may "
3823 "be caused by corrupt data or a bug in"
3824 " GDB."),
3825 phex (lr, ARM_INT_REGISTER_SIZE));
3826 }
3827
3828 /* Implementation of the stop_reason hook for arm_m_exception frames. */
3829
3830 static enum unwind_stop_reason
3831 arm_m_exception_frame_unwind_stop_reason (const frame_info_ptr &this_frame,
3832 void **this_cache)
3833 {
3834 struct arm_prologue_cache *cache;
3835 arm_gdbarch_tdep *tdep
3836 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3837
3838 if (*this_cache == NULL)
3839 *this_cache = arm_m_exception_cache (this_frame);
3840 cache = (struct arm_prologue_cache *) *this_cache;
3841
3842 /* If we've hit a wall, stop. */
3843 if (arm_cache_get_prev_sp_value (cache, tdep) == 0)
3844 return UNWIND_OUTERMOST;
3845
3846 return UNWIND_NO_REASON;
3847 }
3848
3849 /* Implementation of function hook 'this_id' in
3850 'struct frame_unwind'. */
3851
3852 static void
3853 arm_m_exception_this_id (const frame_info_ptr &this_frame,
3854 void **this_cache,
3855 struct frame_id *this_id)
3856 {
3857 struct arm_prologue_cache *cache;
3858
3859 if (*this_cache == NULL)
3860 *this_cache = arm_m_exception_cache (this_frame);
3861 cache = (struct arm_prologue_cache *) *this_cache;
3862
3863 /* Our frame ID for this exception frame is the unwound SP and the frame's PC. */
3864 arm_gdbarch_tdep *tdep
3865 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3866 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3867 get_frame_pc (this_frame));
3868 }
3869
3870 /* Implementation of function hook 'prev_register' in
3871 'struct frame_unwind'. */
3872
3873 static struct value *
3874 arm_m_exception_prev_register (const frame_info_ptr &this_frame,
3875 void **this_cache,
3876 int prev_regnum)
3877 {
3878 struct arm_prologue_cache *cache;
3879 CORE_ADDR sp_value;
3880
3881 if (*this_cache == NULL)
3882 *this_cache = arm_m_exception_cache (this_frame);
3883 cache = (struct arm_prologue_cache *) *this_cache;
3884
3885 /* The value was already reconstructed into PREV_SP. */
3886 arm_gdbarch_tdep *tdep
3887 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3888 if (prev_regnum == ARM_SP_REGNUM)
3889 return frame_unwind_got_constant (this_frame, prev_regnum,
3890 arm_cache_get_prev_sp_value (cache, tdep));
3891
3892 /* If we are asked to unwind the PC, strip the saved T bit. */
3893 if (prev_regnum == ARM_PC_REGNUM)
3894 {
3895 struct value *value = trad_frame_get_prev_register (this_frame,
3896 cache->saved_regs,
3897 prev_regnum);
3898 CORE_ADDR pc = value_as_address (value);
3899 return frame_unwind_got_constant (this_frame, prev_regnum,
3900 UNMAKE_THUMB_ADDR (pc));
3901 }
3902
3903 /* The value might be one of the alternative SP registers; if so, use the
3904 value already constructed. */
3905 if (arm_is_alternative_sp_register (tdep, prev_regnum))
3906 {
3907 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
3908 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
3909 }
3910
3911 /* If we are asked to unwind the xPSR, set the T bit if the PC is in Thumb
3912 mode. The LR register is unreliable here, as it contains the FNC_RETURN
3913 or EXC_RETURN pattern. */
3914 if (prev_regnum == ARM_PS_REGNUM)
3915 {
3916 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3917 struct value *value = trad_frame_get_prev_register (this_frame,
3918 cache->saved_regs,
3919 ARM_PC_REGNUM);
3920 CORE_ADDR pc = value_as_address (value);
3921 value = trad_frame_get_prev_register (this_frame, cache->saved_regs,
3922 ARM_PS_REGNUM);
3923 ULONGEST xpsr = value_as_long (value);
3924
3925 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3926 xpsr = reconstruct_t_bit (gdbarch, pc, xpsr);
3927 return frame_unwind_got_constant (this_frame, ARM_PS_REGNUM, xpsr);
3928 }
3929
3930 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3931 prev_regnum);
3932 }
3933
3934 /* Implementation of function hook 'sniffer' in
3935 'struct frame_unwind'. */
3936
3937 static int
3938 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3939 const frame_info_ptr &this_frame,
3940 void **this_prologue_cache)
3941 {
3942 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3943 CORE_ADDR this_pc = get_frame_pc (this_frame);
3944
3945 /* No need to check is_m; this sniffer is only registered for
3946 M-profile architectures. */
3947
3948 /* Check if exception frame returns to a magic PC value. */
3949 return arm_m_addr_is_magic (gdbarch, this_pc);
3950 }
3951
3952 /* Frame unwinder for M-profile exceptions (EXC_RETURN on stack),
3953 lockup and secure/nonsecure interstate function calls (FNC_RETURN). */
3954
3955 struct frame_unwind_legacy arm_m_exception_unwind (
3956 "arm m exception lockup sec_fnc",
3957 SIGTRAMP_FRAME,
3958 FRAME_UNWIND_ARCH,
3959 arm_m_exception_frame_unwind_stop_reason,
3960 arm_m_exception_this_id,
3961 arm_m_exception_prev_register,
3962 NULL,
3963 arm_m_exception_unwind_sniffer
3964 );
3965
3966 static CORE_ADDR
3967 arm_normal_frame_base (const frame_info_ptr &this_frame, void **this_cache)
3968 {
3969 struct arm_prologue_cache *cache;
3970
3971 if (*this_cache == NULL)
3972 *this_cache = arm_make_prologue_cache (this_frame);
3973 cache = (struct arm_prologue_cache *) *this_cache;
3974
3975 arm_gdbarch_tdep *tdep
3976 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3977 return arm_cache_get_prev_sp_value (cache, tdep) - cache->framesize;
3978 }
3979
3980 struct frame_base arm_normal_base = {
3981 &arm_prologue_unwind,
3982 arm_normal_frame_base,
3983 arm_normal_frame_base,
3984 arm_normal_frame_base
3985 };
3986
3987 struct arm_dwarf2_prev_register_cache
3988 {
3989 /* Cached value of the corresponding stack pointer for the inner frame. */
3990 CORE_ADDR sp;
3991 CORE_ADDR msp;
3992 CORE_ADDR msp_s;
3993 CORE_ADDR msp_ns;
3994 CORE_ADDR psp;
3995 CORE_ADDR psp_s;
3996 CORE_ADDR psp_ns;
3997 };
3998
3999 static struct value *
4000 arm_dwarf2_prev_register (const frame_info_ptr &this_frame, void **this_cache,
4001 int regnum)
4002 {
4003 struct gdbarch * gdbarch = get_frame_arch (this_frame);
4004 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4005 CORE_ADDR lr;
4006 ULONGEST cpsr;
4007 arm_dwarf2_prev_register_cache *cache
4008 = ((arm_dwarf2_prev_register_cache *)
4009 dwarf2_frame_get_fn_data (this_frame, this_cache,
4010 arm_dwarf2_prev_register));
4011
4012 if (!cache)
4013 {
4014 const unsigned int size = sizeof (struct arm_dwarf2_prev_register_cache);
4015 cache = ((arm_dwarf2_prev_register_cache *)
4016 dwarf2_frame_allocate_fn_data (this_frame, this_cache,
4017 arm_dwarf2_prev_register, size));
4018
4019 if (tdep->have_sec_ext)
4020 {
4021 cache->sp
4022 = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
4023
4024 cache->msp_s
4025 = get_frame_register_unsigned (this_frame,
4026 tdep->m_profile_msp_s_regnum);
4027 cache->msp_ns
4028 = get_frame_register_unsigned (this_frame,
4029 tdep->m_profile_msp_ns_regnum);
4030 cache->psp_s
4031 = get_frame_register_unsigned (this_frame,
4032 tdep->m_profile_psp_s_regnum);
4033 cache->psp_ns
4034 = get_frame_register_unsigned (this_frame,
4035 tdep->m_profile_psp_ns_regnum);
4036 }
4037 else if (tdep->is_m)
4038 {
4039 cache->sp
4040 = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
4041
4042 cache->msp
4043 = get_frame_register_unsigned (this_frame,
4044 tdep->m_profile_msp_regnum);
4045 cache->psp
4046 = get_frame_register_unsigned (this_frame,
4047 tdep->m_profile_psp_regnum);
4048 }
4049 }
4050
4051 if (regnum == ARM_PC_REGNUM)
4052 {
4053 /* The PC is normally copied from the return column, which
4054 describes saves of LR. However, that version may have an
4055 extra bit set to indicate Thumb state. The bit is not
4056 part of the PC. */
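/* For example, an LR value of 0x8001235 returning to Thumb code has bit 0
   set; arm_addr_bits_remove below strips it, yielding a PC of 0x8001234. */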
4057
4058 /* Record in the frame whether the return address was signed. */
4059 if (tdep->have_pacbti)
4060 {
4061 CORE_ADDR ra_auth_code
4062 = frame_unwind_register_unsigned (this_frame,
4063 tdep->pacbti_pseudo_base);
4064
4065 if (ra_auth_code != 0)
4066 set_frame_previous_pc_masked (this_frame);
4067 }
4068
4069 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
4070 return frame_unwind_got_constant (this_frame, regnum,
4071 arm_addr_bits_remove (gdbarch, lr));
4072 }
4073 else if (regnum == ARM_PS_REGNUM)
4074 {
4075 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
4076 cpsr = get_frame_register_unsigned (this_frame, regnum);
4077 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
4078 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr);
4079 return frame_unwind_got_constant (this_frame, regnum, cpsr);
4080 }
4081 else if (arm_is_alternative_sp_register (tdep, regnum))
4082 {
4083 /* Handle the alternative SP registers on Cortex-M. */
4084 bool override_with_sp_value = false;
4085 CORE_ADDR val;
4086
4087 if (tdep->have_sec_ext)
4088 {
4089 bool is_msp = (regnum == tdep->m_profile_msp_regnum)
4090 && (cache->msp_s == cache->sp || cache->msp_ns == cache->sp);
4091 bool is_msp_s = (regnum == tdep->m_profile_msp_s_regnum)
4092 && (cache->msp_s == cache->sp);
4093 bool is_msp_ns = (regnum == tdep->m_profile_msp_ns_regnum)
4094 && (cache->msp_ns == cache->sp);
4095 bool is_psp = (regnum == tdep->m_profile_psp_regnum)
4096 && (cache->psp_s == cache->sp || cache->psp_ns == cache->sp);
4097 bool is_psp_s = (regnum == tdep->m_profile_psp_s_regnum)
4098 && (cache->psp_s == cache->sp);
4099 bool is_psp_ns = (regnum == tdep->m_profile_psp_ns_regnum)
4100 && (cache->psp_ns == cache->sp);
4101
4102 override_with_sp_value = is_msp || is_msp_s || is_msp_ns
4103 || is_psp || is_psp_s || is_psp_ns;
4104
4105 }
4106 else if (tdep->is_m)
4107 {
4108 bool is_msp = (regnum == tdep->m_profile_msp_regnum)
4109 && (cache->sp == cache->msp);
4110 bool is_psp = (regnum == tdep->m_profile_psp_regnum)
4111 && (cache->sp == cache->psp);
4112
4113 override_with_sp_value = is_msp || is_psp;
4114 }
4115
4116 if (override_with_sp_value)
4117 {
4118 /* Use value of SP from previous frame. */
4119 frame_info_ptr prev_frame = get_prev_frame (this_frame);
4120 if (prev_frame)
4121 val = get_frame_register_unsigned (prev_frame, ARM_SP_REGNUM);
4122 else
4123 val = get_frame_base (this_frame);
4124 }
4125 else
4126 /* Use value for the register from previous frame. */
4127 val = get_frame_register_unsigned (this_frame, regnum);
4128
4129 return frame_unwind_got_constant (this_frame, regnum, val);
4130 }
4131
4132 internal_error (_("Unexpected register %d"), regnum);
4133 }
4134
4135 /* Implement the stack_frame_destroyed_p gdbarch method. */
4136
4137 static int
4138 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4139 {
4140 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4141 unsigned int insn, insn2;
4142 int found_return = 0, found_stack_adjust = 0;
4143 CORE_ADDR func_start, func_end;
4144 CORE_ADDR scan_pc;
4145 gdb_byte buf[4];
4146
4147 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
4148 return 0;
4149
4150 /* The epilogue is a sequence of instructions along the following lines:
4151
4152 - add stack frame size to SP or FP
4153 - [if frame pointer used] restore SP from FP
4154 - restore registers from SP [may include PC]
4155 - a return-type instruction [if PC wasn't already restored]
4156
4157 In a first pass, we scan forward from the current PC and verify the
4158 instructions we find as compatible with this sequence, ending in a
4159 return instruction.
4160
4161 However, this is not sufficient to distinguish indirect function calls
4162 within a function from indirect tail calls in the epilogue in some cases.
4163 Therefore, if we didn't already find any SP-changing instruction during
4164 forward scan, we add a backward scanning heuristic to ensure we actually
4165 are in the epilogue. */
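/* For instance, with PC at the POP in a typical epilogue such as

     add sp, #8 (encoded as 0xb002)
     pop {r4, r7, pc} (encoded as 0xbd90)

   the forward scan sees the POP, which loads the PC, and records a return;
   the backward scan then finds the preceding ADD, which restores SP. */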
4166
4167 scan_pc = pc;
4168 while (scan_pc < func_end && !found_return)
4169 {
4170 if (target_read_memory (scan_pc, buf, 2))
4171 break;
4172
4173 scan_pc += 2;
4174 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
4175
4176 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
4177 found_return = 1;
4178 else if (insn == 0x46f7) /* mov pc, lr */
4179 found_return = 1;
4180 else if (thumb_instruction_restores_sp (insn))
4181 {
4182 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4183 found_return = 1;
4184 }
4185 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4186 {
4187 if (target_read_memory (scan_pc, buf, 2))
4188 break;
4189
4190 scan_pc += 2;
4191 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
4192
4193 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
4194 {
4195 if (insn2 & 0x8000) /* <registers> include PC. */
4196 found_return = 1;
4197 }
4198 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
4199 && (insn2 & 0x0fff) == 0x0b04)
4200 {
4201 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
4202 found_return = 1;
4203 }
4204 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
4205 && (insn2 & 0x0e00) == 0x0a00)
4206 ;
4207 else
4208 break;
4209 }
4210 else
4211 break;
4212 }
4213
4214 if (!found_return)
4215 return 0;
4216
4217 /* Since any instruction in the epilogue sequence, with the possible
4218 exception of return itself, updates the stack pointer, we need to
4219 scan backwards for at most one instruction. Try either a 16-bit or
4220 a 32-bit instruction. This is just a heuristic, so we do not worry
4221 too much about false positives. */
4222
4223 if (pc - 4 < func_start)
4224 return 0;
4225 if (target_read_memory (pc - 4, buf, 4))
4226 return 0;
4227
4228 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
4229 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
4230
4231 if (thumb_instruction_restores_sp (insn2))
4232 found_stack_adjust = 1;
4233 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
4234 found_stack_adjust = 1;
4235 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
4236 && (insn2 & 0x0fff) == 0x0b04)
4237 found_stack_adjust = 1;
4238 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
4239 && (insn2 & 0x0e00) == 0x0a00)
4240 found_stack_adjust = 1;
4241
4242 return found_stack_adjust;
4243 }
4244
4245 static int
4246 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4247 {
4248 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4249 unsigned int insn;
4250 int found_return;
4251 CORE_ADDR func_start, func_end;
4252
4253 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
4254 return 0;
4255
4256 /* We are in the epilogue if the previous instruction was a stack
4257 adjustment and the next instruction is a possible return (bx, mov
4258 pc, or pop). We could have to scan backwards to find the stack
4259 adjustment, or forwards to find the return, but this is a decent
4260 approximation. First scan forwards. */
4261
4262 found_return = 0;
4263 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4264 if (bits (insn, 28, 31) != INST_NV)
4265 {
4266 if ((insn & 0x0ffffff0) == 0x012fff10)
4267 /* BX. */
4268 found_return = 1;
4269 else if ((insn & 0x0ffffff0) == 0x01a0f000)
4270 /* MOV PC. */
4271 found_return = 1;
4272 else if ((insn & 0x0fff0000) == 0x08bd0000
4273 && (insn & 0x0000c000) != 0)
4274 /* POP (LDMIA), including PC or LR. */
4275 found_return = 1;
4276 }
4277
4278 if (!found_return)
4279 return 0;
4280
4281 /* Scan backwards. This is just a heuristic, so do not worry about
4282 false positives from mode changes. */
4283
4284 if (pc < func_start + 4)
4285 return 0;
4286
4287 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
4288 if (arm_instruction_restores_sp (insn))
4289 return 1;
4290
4291 return 0;
4292 }
4293
4294 /* Implement the stack_frame_destroyed_p gdbarch method. */
4295
4296 static int
4297 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4298 {
4299 if (arm_pc_is_thumb (gdbarch, pc))
4300 return thumb_stack_frame_destroyed_p (gdbarch, pc);
4301 else
4302 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
4303 }
4304
4305 /* When arguments must be pushed onto the stack, they go on in reverse
4306 order. The code below implements a FILO (stack) to do this. */
4307
4308 struct arm_stack_item
4309 {
4310 int len;
4311 struct arm_stack_item *prev;
4312 gdb_byte *data;
4313 };
4314
4315 static struct arm_stack_item *
4316 push_stack_item (struct arm_stack_item *prev, const gdb_byte *contents,
4317 int len)
4318 {
4319 struct arm_stack_item *si;
4320 si = XNEW (struct arm_stack_item);
4321 si->data = (gdb_byte *) xmalloc (len);
4322 si->len = len;
4323 si->prev = prev;
4324 memcpy (si->data, contents, len);
4325 return si;
4326 }
4327
4328 static struct arm_stack_item *
4329 pop_stack_item (struct arm_stack_item *si)
4330 {
4331 struct arm_stack_item *dead = si;
4332 si = si->prev;
4333 xfree (dead->data);
4334 xfree (dead);
4335 return si;
4336 }
4337
4338 /* Implement the gdbarch type alignment method. Overrides the generic
4339 alignment algorithm for anything that is ARM specific. */
4340
4341 static ULONGEST
4342 arm_type_align (gdbarch *gdbarch, struct type *t)
4343 {
4344 t = check_typedef (t);
4345 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
4346 {
4347 /* Use the natural alignment for vector types (the same as for the
4348 scalar element type), but cap the alignment at 64 bits. */
4349 if (t->length () > 8)
4350 return 8;
4351 else
4352 return t->length ();
4353 }
4354
4355 /* Allow the common code to calculate the alignment. */
4356 return 0;
4357 }
4358
4359 /* Possible base types for a candidate for passing and returning in
4360 VFP registers. */
4361
4362 enum arm_vfp_cprc_base_type
4363 {
4364 VFP_CPRC_UNKNOWN,
4365 VFP_CPRC_SINGLE,
4366 VFP_CPRC_DOUBLE,
4367 VFP_CPRC_VEC64,
4368 VFP_CPRC_VEC128
4369 };
4370
4371 /* The length of one element of base type B. */
4372
4373 static unsigned
4374 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
4375 {
4376 switch (b)
4377 {
4378 case VFP_CPRC_SINGLE:
4379 return 4;
4380 case VFP_CPRC_DOUBLE:
4381 return 8;
4382 case VFP_CPRC_VEC64:
4383 return 8;
4384 case VFP_CPRC_VEC128:
4385 return 16;
4386 default:
4387 internal_error (_("Invalid VFP CPRC type: %d."),
4388 (int) b);
4389 }
4390 }
4391
4392 /* The character ('s', 'd' or 'q') for the type of VFP register used
4393 for passing base type B. */
4394
4395 static int
4396 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
4397 {
4398 switch (b)
4399 {
4400 case VFP_CPRC_SINGLE:
4401 return 's';
4402 case VFP_CPRC_DOUBLE:
4403 return 'd';
4404 case VFP_CPRC_VEC64:
4405 return 'd';
4406 case VFP_CPRC_VEC128:
4407 return 'q';
4408 default:
4409 internal_error (_("Invalid VFP CPRC type: %d."),
4410 (int) b);
4411 }
4412 }
4413
4414 /* Determine whether T may be part of a candidate for passing and
4415 returning in VFP registers, ignoring the limit on the total number
4416 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
4417 classification of the first valid component found; if it is not
4418 VFP_CPRC_UNKNOWN, all components must have the same classification
4419 as *BASE_TYPE. If it is found that T contains a type not permitted
4420 for passing and returning in VFP registers, a type differently
4421 classified from *BASE_TYPE, or two types differently classified
4422 from each other, return -1, otherwise return the total number of
4423 base-type elements found (possibly 0 in an empty structure or
4424 array). Vector types are not currently supported, matching the
4425 generic AAPCS support. */
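/* For example, under these rules "struct { float x, y; }" is a candidate
   with base type VFP_CPRC_SINGLE and two elements, while
   "struct { double d; float f; }" mixes base types and is rejected
   (this function returns -1). */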
4426
4427 static int
4428 arm_vfp_cprc_sub_candidate (struct type *t,
4429 enum arm_vfp_cprc_base_type *base_type)
4430 {
4431 t = check_typedef (t);
4432 switch (t->code ())
4433 {
4434 case TYPE_CODE_FLT:
4435 switch (t->length ())
4436 {
4437 case 4:
4438 if (*base_type == VFP_CPRC_UNKNOWN)
4439 *base_type = VFP_CPRC_SINGLE;
4440 else if (*base_type != VFP_CPRC_SINGLE)
4441 return -1;
4442 return 1;
4443
4444 case 8:
4445 if (*base_type == VFP_CPRC_UNKNOWN)
4446 *base_type = VFP_CPRC_DOUBLE;
4447 else if (*base_type != VFP_CPRC_DOUBLE)
4448 return -1;
4449 return 1;
4450
4451 default:
4452 return -1;
4453 }
4454 break;
4455
4456 case TYPE_CODE_COMPLEX:
4457 /* Arguments of complex T where T is one of the types float or
4458 double get treated as if they are implemented as:
4459
4460 struct complexT
4461 {
4462 T real;
4463 T imag;
4464 };
4465
4466 */
4467 switch (t->length ())
4468 {
4469 case 8:
4470 if (*base_type == VFP_CPRC_UNKNOWN)
4471 *base_type = VFP_CPRC_SINGLE;
4472 else if (*base_type != VFP_CPRC_SINGLE)
4473 return -1;
4474 return 2;
4475
4476 case 16:
4477 if (*base_type == VFP_CPRC_UNKNOWN)
4478 *base_type = VFP_CPRC_DOUBLE;
4479 else if (*base_type != VFP_CPRC_DOUBLE)
4480 return -1;
4481 return 2;
4482
4483 default:
4484 return -1;
4485 }
4486 break;
4487
4488 case TYPE_CODE_ARRAY:
4489 {
4490 if (t->is_vector ())
4491 {
4492 /* 64-bit and 128-bit containerized vector types are VFP
4493 CPRCs. */
4494 switch (t->length ())
4495 {
4496 case 8:
4497 if (*base_type == VFP_CPRC_UNKNOWN)
4498 *base_type = VFP_CPRC_VEC64;
4499 return 1;
4500 case 16:
4501 if (*base_type == VFP_CPRC_UNKNOWN)
4502 *base_type = VFP_CPRC_VEC128;
4503 return 1;
4504 default:
4505 return -1;
4506 }
4507 }
4508 else
4509 {
4510 int count;
4511 unsigned unitlen;
4512
4513 count = arm_vfp_cprc_sub_candidate (t->target_type (),
4514 base_type);
4515 if (count == -1)
4516 return -1;
4517 if (t->length () == 0)
4518 {
4519 gdb_assert (count == 0);
4520 return 0;
4521 }
4522 else if (count == 0)
4523 return -1;
4524 unitlen = arm_vfp_cprc_unit_length (*base_type);
4525 gdb_assert ((t->length () % unitlen) == 0);
4526 return t->length () / unitlen;
4527 }
4528 }
4529 break;
4530
4531 case TYPE_CODE_STRUCT:
4532 {
4533 int count = 0;
4534 unsigned unitlen;
4535 int i;
4536 for (i = 0; i < t->num_fields (); i++)
4537 {
4538 int sub_count = 0;
4539
4540 if (!t->field (i).is_static ())
4541 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4542 base_type);
4543 if (sub_count == -1)
4544 return -1;
4545 count += sub_count;
4546 }
4547 if (t->length () == 0)
4548 {
4549 gdb_assert (count == 0);
4550 return 0;
4551 }
4552 else if (count == 0)
4553 return -1;
4554 unitlen = arm_vfp_cprc_unit_length (*base_type);
4555 if (t->length () != unitlen * count)
4556 return -1;
4557 return count;
4558 }
4559
4560 case TYPE_CODE_UNION:
4561 {
4562 int count = 0;
4563 unsigned unitlen;
4564 int i;
4565 for (i = 0; i < t->num_fields (); i++)
4566 {
4567 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4568 base_type);
4569 if (sub_count == -1)
4570 return -1;
4571 count = (count > sub_count ? count : sub_count);
4572 }
4573 if (t->length () == 0)
4574 {
4575 gdb_assert (count == 0);
4576 return 0;
4577 }
4578 else if (count == 0)
4579 return -1;
4580 unitlen = arm_vfp_cprc_unit_length (*base_type);
4581 if (t->length () != unitlen * count)
4582 return -1;
4583 return count;
4584 }
4585
4586 default:
4587 break;
4588 }
4589
4590 return -1;
4591 }
4592
4593 /* Determine whether T is a VFP co-processor register candidate (CPRC)
4594 if passed to or returned from a non-variadic function with the VFP
4595 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
4596 *BASE_TYPE to the base type for T and *COUNT to the number of
4597 elements of that base type before returning. */
4598
4599 static int
4600 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
4601 int *count)
4602 {
4603 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
4604 int c = arm_vfp_cprc_sub_candidate (t, &b);
4605 if (c <= 0 || c > 4)
4606 return 0;
4607 *base_type = b;
4608 *count = c;
4609 return 1;
4610 }
4611
4612 /* Return 1 if the VFP ABI should be used for passing arguments to and
4613 returning values from a function of type FUNC_TYPE, 0
4614 otherwise. */
4615
4616 static int
4617 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
4618 {
4619 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4620
4621 /* Variadic functions always use the base ABI. Assume that functions
4622 without debug info are not variadic. */
4623 if (func_type && check_typedef (func_type)->has_varargs ())
4624 return 0;
4625
4626 /* The VFP ABI is only supported as a variant of AAPCS. */
4627 if (tdep->arm_abi != ARM_ABI_AAPCS)
4628 return 0;
4629
4630 return tdep->fp_model == ARM_FLOAT_VFP;
4631 }
4632
4633 /* We currently support only passing parameters in integer registers (which
4634 conforms with GCC's default model) and VFP argument passing following
4635 the VFP variant of AAPCS. Several other variants exist and
4636 we should probably support some of them based on the selected ABI. */
4637
4638 static CORE_ADDR
4639 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
4640 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
4641 struct value **args, CORE_ADDR sp,
4642 function_call_return_method return_method,
4643 CORE_ADDR struct_addr)
4644 {
4645 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4646 int argnum;
4647 int argreg;
4648 int nstack;
4649 struct arm_stack_item *si = NULL;
4650 int use_vfp_abi;
4651 struct type *ftype;
4652 unsigned vfp_regs_free = (1 << 16) - 1;
4653 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4654
4655 /* Determine the type of this function and whether the VFP ABI
4656 applies. */
4657 ftype = check_typedef (function->type ());
4658 if (ftype->code () == TYPE_CODE_PTR)
4659 ftype = check_typedef (ftype->target_type ());
4660 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
4661
4662 /* Set the return address. For the ARM, the return breakpoint is
4663 always at BP_ADDR. */
4664 if (arm_pc_is_thumb (gdbarch, bp_addr))
4665 bp_addr |= 1;
4666 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
4667
4668 /* Walk through the list of args and determine how large a temporary
4669 stack is required. Need to take care here as structs may be
4670 passed on the stack, and we have to push them. */
4671 nstack = 0;
4672
4673 argreg = ARM_A1_REGNUM;
4674 nstack = 0;
4675
4676 /* The struct_return pointer occupies the first parameter
4677 passing register. */
4678 if (return_method == return_method_struct)
4679 {
4680 arm_debug_printf ("struct return in %s = %s",
4681 gdbarch_register_name (gdbarch, argreg),
4682 paddress (gdbarch, struct_addr));
4683
4684 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
4685 argreg++;
4686 }
4687
4688 for (argnum = 0; argnum < nargs; argnum++)
4689 {
4690 int len;
4691 struct type *arg_type;
4692 struct type *target_type;
4693 enum type_code typecode;
4694 const bfd_byte *val;
4695 int align;
4696 enum arm_vfp_cprc_base_type vfp_base_type;
4697 int vfp_base_count;
4698 int may_use_core_reg = 1;
4699
4700 arg_type = check_typedef (args[argnum]->type ());
4701 len = arg_type->length ();
4702 target_type = arg_type->target_type ();
4703 typecode = arg_type->code ();
4704 val = args[argnum]->contents ().data ();
4705
4706 align = type_align (arg_type);
4707 /* Round alignment up to a whole number of words. */
4708 align = (align + ARM_INT_REGISTER_SIZE - 1)
4709 & ~(ARM_INT_REGISTER_SIZE - 1);
4710 /* Different ABIs have different maximum alignments. */
4711 if (tdep->arm_abi == ARM_ABI_APCS)
4712 {
4713 /* The APCS ABI only requires word alignment. */
4714 align = ARM_INT_REGISTER_SIZE;
4715 }
4716 else
4717 {
4718 /* The AAPCS requires at most doubleword alignment. */
4719 if (align > ARM_INT_REGISTER_SIZE * 2)
4720 align = ARM_INT_REGISTER_SIZE * 2;
4721 }
4722
4723 if (use_vfp_abi
4724 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
4725 &vfp_base_count))
4726 {
4727 int regno;
4728 int unit_length;
4729 int shift;
4730 unsigned mask;
4731
4732 /* Because this is a CPRC it cannot go in a core register or
4733 cause a core register to be skipped for alignment.
4734 Either it goes in VFP registers and the rest of this loop
4735 iteration is skipped for this argument, or it goes on the
4736 stack (and the stack alignment code is correct for this
4737 case). */
4738 may_use_core_reg = 0;
4739
4740 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
4741 shift = unit_length / 4;
4742 mask = (1 << (shift * vfp_base_count)) - 1;
4743 for (regno = 0; regno < 16; regno += shift)
4744 if (((vfp_regs_free >> regno) & mask) == mask)
4745 break;
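/* For example, for a homogeneous aggregate of two doubles (VFP_CPRC_DOUBLE,
   VFP_BASE_COUNT == 2): UNIT_LENGTH is 8, SHIFT is 2 and MASK is 0xf, so
   the loop above looks for four consecutive free S registers starting at a
   D register boundary, e.g. d0/d1 (s0..s3) on the first argument. */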
4746
4747 if (regno < 16)
4748 {
4749 int reg_char;
4750 int reg_scaled;
4751 int i;
4752
4753 vfp_regs_free &= ~(mask << regno);
4754 reg_scaled = regno / shift;
4755 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
4756 for (i = 0; i < vfp_base_count; i++)
4757 {
4758 char name_buf[4];
4759 int regnum;
4760 if (reg_char == 'q')
4761 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
4762 val + i * unit_length);
4763 else
4764 {
4765 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
4766 reg_char, reg_scaled + i);
4767 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
4768 strlen (name_buf));
4769 regcache->cooked_write (regnum, val + i * unit_length);
4770 }
4771 }
4772 continue;
4773 }
4774 else
4775 {
4776 /* This CPRC could not go in VFP registers, so all VFP
4777 registers are now marked as used. */
4778 vfp_regs_free = 0;
4779 }
4780 }
4781
4782 /* Push stack padding for doubleword alignment. */
4783 if (nstack & (align - 1))
4784 {
4785 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
4786 nstack += ARM_INT_REGISTER_SIZE;
4787 }
4788
4789 /* Doubleword aligned quantities must go in even register pairs. */
4790 if (may_use_core_reg
4791 && argreg <= ARM_LAST_ARG_REGNUM
4792 && align > ARM_INT_REGISTER_SIZE
4793 && argreg & 1)
4794 argreg++;
4795
4796 /* If the argument is a pointer to a function, and it is a
4797 Thumb function, create a LOCAL copy of the value and set
4798 the THUMB bit in it. */
4799 if (TYPE_CODE_PTR == typecode
4800 && target_type != NULL
4801 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
4802 {
4803 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
4804 if (arm_pc_is_thumb (gdbarch, regval))
4805 {
4806 bfd_byte *copy = (bfd_byte *) alloca (len);
4807 store_unsigned_integer (copy, len, byte_order,
4808 MAKE_THUMB_ADDR (regval));
4809 val = copy;
4810 }
4811 }
4812
4813 /* Copy the argument to general registers or the stack in
4814 register-sized pieces. Large arguments are split between
4815 registers and stack. */
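/* For example, a 12-byte structure passed when only r2 and r3 remain free
   is split: the first two words go in r2 and r3, and the final word is
   pushed onto the stack. */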
4816 while (len > 0)
4817 {
4818 int partial_len = len < ARM_INT_REGISTER_SIZE
4819 ? len : ARM_INT_REGISTER_SIZE;
4820 CORE_ADDR regval
4821 = extract_unsigned_integer (val, partial_len, byte_order);
4822
4823 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
4824 {
4825 /* The argument is being passed in a general purpose
4826 register. */
4827 arm_debug_printf ("arg %d in %s = 0x%s", argnum,
4828 gdbarch_register_name (gdbarch, argreg),
4829 phex (regval, ARM_INT_REGISTER_SIZE));
4830
4831 regcache_cooked_write_unsigned (regcache, argreg, regval);
4832 argreg++;
4833 }
4834 else
4835 {
4836 gdb_byte buf[ARM_INT_REGISTER_SIZE];
4837
4838 memset (buf, 0, sizeof (buf));
4839 store_unsigned_integer (buf, partial_len, byte_order, regval);
4840
4841 /* Push the arguments onto the stack. */
4842 arm_debug_printf ("arg %d @ sp + %d", argnum, nstack);
4843 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
4844 nstack += ARM_INT_REGISTER_SIZE;
4845 }
4846
4847 len -= partial_len;
4848 val += partial_len;
4849 }
4850 }
4851 /* If we have an odd number of words to push, then decrement the stack
4852 by one word now, so that the first stack argument will be doubleword aligned. */
4853 if (nstack & 4)
4854 sp -= 4;
4855
4856 while (si)
4857 {
4858 sp -= si->len;
4859 write_memory (sp, si->data, si->len);
4860 si = pop_stack_item (si);
4861 }
4862
4863 /* Finally, update the SP register. */
4864 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
4865
4866 return sp;
4867 }
4868
4869
4870 /* Always align the frame to an 8-byte boundary. This is required on
4871 some platforms and harmless on the rest. */
4872
4873 static CORE_ADDR
4874 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
4875 {
4876 /* Align the stack to eight bytes. */
4877 return sp & ~ (CORE_ADDR) 7;
4878 }
4879
4880 static void
4881 print_fpu_flags (struct ui_file *file, int flags)
4882 {
4883 if (flags & (1 << 0))
4884 gdb_puts ("IVO ", file);
4885 if (flags & (1 << 1))
4886 gdb_puts ("DVZ ", file);
4887 if (flags & (1 << 2))
4888 gdb_puts ("OFL ", file);
4889 if (flags & (1 << 3))
4890 gdb_puts ("UFL ", file);
4891 if (flags & (1 << 4))
4892 gdb_puts ("INX ", file);
4893 gdb_putc ('\n', file);
4894 }
4895
4896 /* Print interesting information about the floating point processor
4897 (if present) or emulator. */
4898 static void
4899 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4900 const frame_info_ptr &frame, const char *args)
4901 {
4902 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4903 int type;
4904
4905 type = (status >> 24) & 127;
4906 if (status & (1 << 31))
4907 gdb_printf (file, _("Hardware FPU type %d\n"), type);
4908 else
4909 gdb_printf (file, _("Software FPU type %d\n"), type);
4910 /* i18n: [floating point unit] mask */
4911 gdb_puts (_("mask: "), file);
4912 print_fpu_flags (file, status >> 16);
4913 /* i18n: [floating point unit] flags */
4914 gdb_puts (_("flags: "), file);
4915 print_fpu_flags (file, status);
4916 }
4917
4918 /* Construct the ARM extended floating point type. */
4919 static struct type *
4920 arm_ext_type (struct gdbarch *gdbarch)
4921 {
4922 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4923
4924 if (!tdep->arm_ext_type)
4925 {
4926 type_allocator alloc (gdbarch);
4927 tdep->arm_ext_type
4928 = init_float_type (alloc, -1, "builtin_type_arm_ext",
4929 floatformats_arm_ext);
4930 }
4931
4932 return tdep->arm_ext_type;
4933 }
4934
4935 static struct type *
4936 arm_neon_double_type (struct gdbarch *gdbarch)
4937 {
4938 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4939
4940 if (tdep->neon_double_type == NULL)
4941 {
4942 struct type *t, *elem;
4943
4944 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4945 TYPE_CODE_UNION);
4946 elem = builtin_type (gdbarch)->builtin_uint8;
4947 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4948 elem = builtin_type (gdbarch)->builtin_uint16;
4949 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4950 elem = builtin_type (gdbarch)->builtin_uint32;
4951 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4952 elem = builtin_type (gdbarch)->builtin_uint64;
4953 append_composite_type_field (t, "u64", elem);
4954 elem = builtin_type (gdbarch)->builtin_float;
4955 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4956 elem = builtin_type (gdbarch)->builtin_double;
4957 append_composite_type_field (t, "f64", elem);
4958
4959 t->set_is_vector (true);
4960 t->set_name ("neon_d");
4961 tdep->neon_double_type = t;
4962 }
4963
4964 return tdep->neon_double_type;
4965 }
4966
4967 /* FIXME: The vector types are not correctly ordered on big-endian
4968 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4969 bits of d0 - regardless of what unit size is being held in d0. So
4970 the offset of the first uint8 in d0 is 7, but the offset of the
4971 first float is 4. This code works as-is for little-endian
4972 targets. */
4973
4974 static struct type *
4975 arm_neon_quad_type (struct gdbarch *gdbarch)
4976 {
4977 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4978
4979 if (tdep->neon_quad_type == NULL)
4980 {
4981 struct type *t, *elem;
4982
4983 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4984 TYPE_CODE_UNION);
4985 elem = builtin_type (gdbarch)->builtin_uint8;
4986 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4987 elem = builtin_type (gdbarch)->builtin_uint16;
4988 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4989 elem = builtin_type (gdbarch)->builtin_uint32;
4990 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4991 elem = builtin_type (gdbarch)->builtin_uint64;
4992 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4993 elem = builtin_type (gdbarch)->builtin_float;
4994 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4995 elem = builtin_type (gdbarch)->builtin_double;
4996 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4997
4998 t->set_is_vector (true);
4999 t->set_name ("neon_q");
5000 tdep->neon_quad_type = t;
5001 }
5002
5003 return tdep->neon_quad_type;
5004 }
5005
5006 /* Return true if REGNUM is a Q pseudo register. Return false
5007 otherwise.
5008
5009 REGNUM is the raw register number and not a pseudo-relative register
5010 number. */
5011
5012 static bool
5013 is_q_pseudo (struct gdbarch *gdbarch, int regnum)
5014 {
5015 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5016
5017 /* Q pseudo registers are available for both NEON (Q0~Q15) and
5018 MVE (Q0~Q7) features. */
5019 if (tdep->have_q_pseudos
5020 && regnum >= tdep->q_pseudo_base
5021 && regnum < (tdep->q_pseudo_base + tdep->q_pseudo_count))
5022 return true;
5023
5024 return false;
5025 }
5026
5027 /* Return true if REGNUM is a VFP S pseudo register. Return false
5028 otherwise.
5029
5030 REGNUM is the raw register number and not a pseudo-relative register
5031 number. */
5032
5033 static bool
5034 is_s_pseudo (struct gdbarch *gdbarch, int regnum)
5035 {
5036 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5037
5038 if (tdep->have_s_pseudos
5039 && regnum >= tdep->s_pseudo_base
5040 && regnum < (tdep->s_pseudo_base + tdep->s_pseudo_count))
5041 return true;
5042
5043 return false;
5044 }
5045
5046 /* Return true if REGNUM is a MVE pseudo register (P0). Return false
5047 otherwise.
5048
5049 REGNUM is the raw register number and not a pseudo-relative register
5050 number. */
5051
5052 static bool
5053 is_mve_pseudo (struct gdbarch *gdbarch, int regnum)
5054 {
5055 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5056
5057 if (tdep->have_mve
5058 && regnum >= tdep->mve_pseudo_base
5059 && regnum < tdep->mve_pseudo_base + tdep->mve_pseudo_count)
5060 return true;
5061
5062 return false;
5063 }
5064
5065 /* Return true if REGNUM is a PACBTI pseudo register (ra_auth_code). Return
5066 false otherwise.
5067
5068 REGNUM is the raw register number and not a pseudo-relative register
5069 number. */
5070
5071 static bool
5072 is_pacbti_pseudo (struct gdbarch *gdbarch, int regnum)
5073 {
5074 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5075
5076 if (tdep->have_pacbti
5077 && regnum >= tdep->pacbti_pseudo_base
5078 && regnum < tdep->pacbti_pseudo_base + tdep->pacbti_pseudo_count)
5079 return true;
5080
5081 return false;
5082 }
5083
5084 /* Return the GDB type object for the "standard" data type of data in
5085 register N. */
5086
5087 static struct type *
5088 arm_register_type (struct gdbarch *gdbarch, int regnum)
5089 {
5090 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5091
5092 if (is_s_pseudo (gdbarch, regnum))
5093 return builtin_type (gdbarch)->builtin_float;
5094
5095 if (is_q_pseudo (gdbarch, regnum))
5096 return arm_neon_quad_type (gdbarch);
5097
5098 if (is_mve_pseudo (gdbarch, regnum))
5099 return builtin_type (gdbarch)->builtin_int16;
5100
5101 if (is_pacbti_pseudo (gdbarch, regnum))
5102 return builtin_type (gdbarch)->builtin_uint32;
5103
5104 /* If the target description has register information, we are only
5105 in this function so that we can override the types of
5106 double-precision registers for NEON. */
5107 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
5108 {
5109 struct type *t = tdesc_register_type (gdbarch, regnum);
5110
5111 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
5112 && t->code () == TYPE_CODE_FLT
5113 && tdep->have_neon)
5114 return arm_neon_double_type (gdbarch);
5115 else
5116 return t;
5117 }
5118
5119 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
5120 {
5121 if (!tdep->have_fpa_registers)
5122 return builtin_type (gdbarch)->builtin_void;
5123
5124 return arm_ext_type (gdbarch);
5125 }
5126 else if (regnum == ARM_SP_REGNUM)
5127 return builtin_type (gdbarch)->builtin_data_ptr;
5128 else if (regnum == ARM_PC_REGNUM)
5129 return builtin_type (gdbarch)->builtin_func_ptr;
5130 else if (regnum >= ARRAY_SIZE (arm_register_names))
5131 /* These registers are only supported on targets which supply
5132 an XML description. */
5133 return builtin_type (gdbarch)->builtin_int0;
5134 else
5135 return builtin_type (gdbarch)->builtin_uint32;
5136 }
5137
5138 /* Map a DWARF register REGNUM onto the appropriate GDB register
5139 number. */
5140
5141 static int
5142 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
5143 {
5144 /* Core integer regs. */
5145 if (reg >= 0 && reg <= 15)
5146 return reg;
5147
5148 /* Legacy FPA encoding. These were once used in a way which
5149 overlapped with VFP register numbering, so their use is
5150 discouraged, but GDB doesn't support the ARM toolchain
5151 which used them for VFP. */
5152 if (reg >= 16 && reg <= 23)
5153 return ARM_F0_REGNUM + reg - 16;
5154
5155 /* New assignments for the FPA registers. */
5156 if (reg >= 96 && reg <= 103)
5157 return ARM_F0_REGNUM + reg - 96;
5158
5159 /* WMMX register assignments. */
5160 if (reg >= 104 && reg <= 111)
5161 return ARM_WCGR0_REGNUM + reg - 104;
5162
5163 if (reg >= 112 && reg <= 127)
5164 return ARM_WR0_REGNUM + reg - 112;
5165
5166 /* PACBTI register containing the Pointer Authentication Code. */
5167 if (reg == ARM_DWARF_RA_AUTH_CODE)
5168 {
5169 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5170
5171 if (tdep->have_pacbti)
5172 return tdep->pacbti_pseudo_base;
5173
5174 return -1;
5175 }
5176
5177 if (reg >= 192 && reg <= 199)
5178 return ARM_WC0_REGNUM + reg - 192;
5179
5180 /* VFP v2 registers. A double precision value is actually
5181 in d1 rather than s2, but the ABI only defines numbering
5182 for the single precision registers. This will "just work"
5183 in GDB for little endian targets (we'll read eight bytes,
5184 starting in s0 and then progressing to s1), but will be
5185 reversed on big endian targets with VFP. This won't
5186 be a problem for the new Neon quad registers; you're supposed
5187 to use DW_OP_piece for those. */
5188 if (reg >= 64 && reg <= 95)
5189 {
5190 char name_buf[4];
5191
5192 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
5193 return user_reg_map_name_to_regnum (gdbarch, name_buf,
5194 strlen (name_buf));
5195 }
5196
5197 /* VFP v3 / Neon registers. This range is also used for VFP v2
5198 registers, except that it now describes d0 instead of s0. */
5199 if (reg >= 256 && reg <= 287)
5200 {
5201 char name_buf[4];
5202
5203 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
5204 return user_reg_map_name_to_regnum (gdbarch, name_buf,
5205 strlen (name_buf));
5206 }
5207
5208 return -1;
5209 }
5210
5211 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
5212 static int
5213 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
5214 {
5215 int reg = regnum;
5216 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
5217
5218 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
5219 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
5220
5221 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
5222 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
5223
5224 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
5225 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
5226
5227 if (reg < NUM_GREGS)
5228 return SIM_ARM_R0_REGNUM + reg;
5229 reg -= NUM_GREGS;
5230
5231 if (reg < NUM_FREGS)
5232 return SIM_ARM_FP0_REGNUM + reg;
5233 reg -= NUM_FREGS;
5234
5235 if (reg < NUM_SREGS)
5236 return SIM_ARM_FPS_REGNUM + reg;
5237 reg -= NUM_SREGS;
5238
5239 internal_error (_("Bad REGNUM %d"), regnum);
5240 }
5241
5242 static const unsigned char op_lit0 = DW_OP_lit0;
5243
5244 static void
5245 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
5246 struct dwarf2_frame_state_reg *reg,
5247 const frame_info_ptr &this_frame)
5248 {
5249 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5250
5251 if (is_pacbti_pseudo (gdbarch, regnum))
5252 {
5253 /* Initialize RA_AUTH_CODE to zero. */
5254 reg->how = DWARF2_FRAME_REG_SAVED_VAL_EXP;
5255 reg->loc.exp.start = &op_lit0;
5256 reg->loc.exp.len = 1;
5257 return;
5258 }
5259
5260 if (regnum == ARM_PC_REGNUM || regnum == ARM_PS_REGNUM)
5261 {
5262 reg->how = DWARF2_FRAME_REG_FN;
5263 reg->loc.fn = arm_dwarf2_prev_register;
5264 }
5265 else if (regnum == ARM_SP_REGNUM)
5266 reg->how = DWARF2_FRAME_REG_CFA;
5267 else if (arm_is_alternative_sp_register (tdep, regnum))
5268 {
5269 /* Handle the alternative SP registers on Cortex-M. */
5270 reg->how = DWARF2_FRAME_REG_FN;
5271 reg->loc.fn = arm_dwarf2_prev_register;
5272 }
5273 }
5274
5275 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5276 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5277 NULL if an error occurs. BUF is freed. */
5278
5279 static gdb_byte *
5280 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5281 int old_len, int new_len)
5282 {
5283 gdb_byte *new_buf;
5284 int bytes_to_read = new_len - old_len;
5285
5286 new_buf = (gdb_byte *) xmalloc (new_len);
5287 memcpy (new_buf + bytes_to_read, buf, old_len);
5288 xfree (buf);
5289 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
5290 {
5291 xfree (new_buf);
5292 return NULL;
5293 }
5294 return new_buf;
5295 }
5296
5297 /* An IT block is at most the 2-byte IT instruction followed by
5298 four 4-byte instructions. The furthest back we must search to
5299 find an IT block that affects the current instruction is thus
5300 2 + 3 * 4 == 14 bytes. */
5301 #define MAX_IT_BLOCK_PREFIX 14
5302
5303 /* Use a quick scan if there are more than this many bytes of
5304 code. */
5305 #define IT_SCAN_THRESHOLD 32
5306
5307 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5308 A breakpoint in an IT block may not be hit, depending on the
5309 condition flags. */
5310 static CORE_ADDR
5311 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5312 {
5313 gdb_byte *buf;
5314 char map_type;
5315 CORE_ADDR boundary, func_start;
5316 int buf_len;
5317 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5318 int i, any, last_it, last_it_count;
5319 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5320
5321 /* If we are using BKPT breakpoints, none of this is necessary. */
5322 if (tdep->thumb2_breakpoint == NULL)
5323 return bpaddr;
5324
5325 /* ARM mode does not have this problem. */
5326 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5327 return bpaddr;
5328
5329 /* We are setting a breakpoint in Thumb code that could potentially
5330 contain an IT block. The first step is to find how much Thumb
5331 code there is; we do not need to read outside of known Thumb
5332 sequences. */
5333 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5334 if (map_type == 0)
5335 /* Thumb-2 code must have mapping symbols to have a chance. */
5336 return bpaddr;
5337
5338 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5339
5340 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL))
5341 {
5342 func_start = gdbarch_addr_bits_remove (gdbarch, func_start);
5343 if (func_start > boundary)
5344 boundary = func_start;
5345 }
5346
5347 /* Search for a candidate IT instruction. We have to do some fancy
5348 footwork to distinguish a real IT instruction from the second
5349 half of a 32-bit instruction, but there is no need for that if
5350 there's no candidate. */
5351 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
5352 if (buf_len == 0)
5353 /* No room for an IT instruction. */
5354 return bpaddr;
5355
5356 buf = (gdb_byte *) xmalloc (buf_len);
5357 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
     {
       /* Don't leak the buffer on the error path.  */
       xfree (buf);
5358   return bpaddr;
     }
5359 any = 0;
5360 for (i = 0; i < buf_len; i += 2)
5361 {
5362 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5363 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5364 {
5365 any = 1;
5366 break;
5367 }
5368 }
5369
5370 if (any == 0)
5371 {
5372 xfree (buf);
5373 return bpaddr;
5374 }
5375
5376 /* OK, the code bytes before this instruction contain at least one
5377 halfword which resembles an IT instruction. We know that it's
5378 Thumb code, but there are still two possibilities. Either the
5379 halfword really is an IT instruction, or it is the second half of
5380 a 32-bit Thumb instruction. The only way we can tell is to
5381 scan forwards from a known instruction boundary. */
5382 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5383 {
5384 int definite;
5385
5386 /* There's a lot of code before this instruction. Start with an
5387 optimistic search; it's easy to recognize halfwords that can
5388 not be the start of a 32-bit instruction, and use that to
5389 lock on to the instruction boundaries. */
5390 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5391 if (buf == NULL)
5392 return bpaddr;
5393 buf_len = IT_SCAN_THRESHOLD;
5394
5395 definite = 0;
5396 for (i = 0; i < buf_len - MAX_IT_BLOCK_PREFIX && ! definite; i += 2)
5397 {
5398 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5399 if (thumb_insn_size (inst1) == 2)
5400 {
5401 definite = 1;
5402 break;
5403 }
5404 }
5405
5406 /* At this point, if DEFINITE, BUF[I] is the first place we
5407 are sure that we know the instruction boundaries, and it is far
5408 enough from BPADDR that we could not miss an IT instruction
5409 affecting BPADDR. If ! DEFINITE, give up - start from a
5410 known boundary. */
5411 if (! definite)
5412 {
5413 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5414 bpaddr - boundary);
5415 if (buf == NULL)
5416 return bpaddr;
5417 buf_len = bpaddr - boundary;
5418 i = 0;
5419 }
5420 }
5421 else
5422 {
5423 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5424 if (buf == NULL)
5425 return bpaddr;
5426 buf_len = bpaddr - boundary;
5427 i = 0;
5428 }
5429
5430 /* Scan forwards. Find the last IT instruction before BPADDR. */
5431 last_it = -1;
5432 last_it_count = 0;
5433 while (i < buf_len)
5434 {
5435 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5436 last_it_count--;
5437 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5438 {
5439 last_it = i;
5440 if (inst1 & 0x0001)
5441 last_it_count = 4;
5442 else if (inst1 & 0x0002)
5443 last_it_count = 3;
5444 else if (inst1 & 0x0004)
5445 last_it_count = 2;
5446 else
5447 last_it_count = 1;
5448 }
5449 i += thumb_insn_size (inst1);
5450 }
5451
5452 xfree (buf);
5453
5454 if (last_it == -1)
5455 /* There wasn't really an IT instruction after all. */
5456 return bpaddr;
5457
5458 if (last_it_count < 1)
5459 /* It was too far away. */
5460 return bpaddr;
5461
5462 /* This really is a trouble spot. Move the breakpoint to the IT
5463 instruction. */
5464 return bpaddr - buf_len + last_it;
5465 }
5466
5467 /* ARM displaced stepping support.
5468
5469 Generally ARM displaced stepping works as follows:
5470
5471 1. When an instruction is to be single-stepped, it is first decoded by
5472 arm_process_displaced_insn. Depending on the type of instruction, it is
5473 then copied to a scratch location, possibly in a modified form. The
5474 copy_* set of functions performs such modification, as necessary. A
5475 breakpoint is placed after the modified instruction in the scratch space
5476 to return control to GDB. Note in particular that instructions which
5477 modify the PC will no longer do so after modification.
5478
5479 2. The instruction is single-stepped, by setting the PC to the scratch
5480 location address, and resuming. Control returns to GDB when the
5481 breakpoint is hit.
5482
5483 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5484 function used for the current instruction. This function's job is to
5485 put the CPU/memory state back to what it would have been if the
5486 instruction had been executed unmodified in its original location. */
5487
5488 /* NOP instruction (mov r0, r0). */
5489 #define ARM_NOP 0xe1a00000
5490 #define THUMB_NOP 0x4600
5491
5492 /* Helper for register reads for displaced stepping. In particular, this
5493 returns the PC as it would be seen by the instruction at its original
5494 location. */
5495
5496 ULONGEST
5497 displaced_read_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5498 int regno)
5499 {
5500 ULONGEST ret;
5501 CORE_ADDR from = dsc->insn_addr;
5502
5503 if (regno == ARM_PC_REGNUM)
5504 {
5505 /* Compute pipeline offset:
5506 - When executing an ARM instruction, PC reads as the address of the
5507 current instruction plus 8.
5508 - When executing a Thumb instruction, PC reads as the address of the
5509 current instruction plus 4. */
5510
5511 if (!dsc->is_thumb)
5512 from += 8;
5513 else
5514 from += 4;
5515
5516 displaced_debug_printf ("read pc value %.8lx",
5517 (unsigned long) from);
5518 return (ULONGEST) from;
5519 }
5520 else
5521 {
5522 regcache_cooked_read_unsigned (regs, regno, &ret);
5523
5524 displaced_debug_printf ("read r%d value %.8lx",
5525 regno, (unsigned long) ret);
5526
5527 return ret;
5528 }
5529 }
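
/* Illustrative sketch, never compiled into GDB (hence the #if 0): how the
   pipeline offset above plays out.  The function name and the concrete
   addresses are hypothetical; DSC is assumed to describe an ARM
   (non-Thumb) instruction originally located at 0x1000.  */
#if 0
static void
displaced_read_reg_example (regcache *regs,
                            arm_displaced_step_copy_insn_closure *dsc)
{
  /* With dsc->insn_addr == 0x1000 and dsc->is_thumb == 0, the PC reads
     as the instruction address plus 8.  */
  ULONGEST pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
  gdb_assert (pc == 0x1000 + 8);

  /* Any other register comes straight from the register cache.  */
  ULONGEST r3 = displaced_read_reg (regs, dsc, 3);
}
#endif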
5530
5531 static int
5532 displaced_in_arm_mode (struct regcache *regs)
5533 {
5534 ULONGEST ps;
5535 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5536
5537 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5538
5539 return (ps & t_bit) == 0;
5540 }
5541
5542 /* Write to the PC as from a branch instruction. */
5543
5544 static void
5545 branch_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5546 ULONGEST val)
5547 {
5548 if (!dsc->is_thumb)
5549 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5550 architecture versions < 6. */
5551 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5552 val & ~(ULONGEST) 0x3);
5553 else
5554 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5555 val & ~(ULONGEST) 0x1);
5556 }
5557
5558 /* Write to the PC as from a branch-exchange instruction. */
5559
5560 static void
5561 bx_write_pc (struct regcache *regs, ULONGEST val)
5562 {
5563 ULONGEST ps;
5564 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5565
5566 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5567
5568 if ((val & 1) == 1)
5569 {
5570 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5571 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5572 }
5573 else if ((val & 2) == 0)
5574 {
5575 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5576 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5577 }
5578 else
5579 {
5580 /* Unpredictable behavior. Try to do something sensible (switch to ARM
5581 mode, align dest to 4 bytes). */
5582 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5583 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5584 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5585 }
5586 }
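
/* Illustrative sketch, never compiled (hypothetical helper): bit 0 of the
   value written selects the resulting execution state, just as for the BX
   instruction itself.  REGS is assumed to be a valid ARM register cache.  */
#if 0
static void
bx_write_pc_example (struct regcache *regs)
{
  bx_write_pc (regs, 0x8001);	/* Thumb bit set, PC <- 0x8000.  */
  bx_write_pc (regs, 0x8000);	/* Thumb bit cleared, PC <- 0x8000.  */
}
#endif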
5587
5588 /* Write to the PC as if from a load instruction. */
5589
5590 static void
5591 load_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5592 ULONGEST val)
5593 {
5594 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5595 bx_write_pc (regs, val);
5596 else
5597 branch_write_pc (regs, dsc, val);
5598 }
5599
5600 /* Write to the PC as if from an ALU instruction. */
5601
5602 static void
5603 alu_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5604 ULONGEST val)
5605 {
5606 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5607 bx_write_pc (regs, val);
5608 else
5609 branch_write_pc (regs, dsc, val);
5610 }
5611
5612 /* Helper for writing to registers for displaced stepping. Writing to the PC
5613 has varying effects depending on the instruction which does the write:
5614 this is controlled by the WRITE_PC argument. */
5615
5616 void
5617 displaced_write_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5618 int regno, ULONGEST val, enum pc_write_style write_pc)
5619 {
5620 if (regno == ARM_PC_REGNUM)
5621 {
5622 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val);
5623
5624 switch (write_pc)
5625 {
5626 case BRANCH_WRITE_PC:
5627 branch_write_pc (regs, dsc, val);
5628 break;
5629
5630 case BX_WRITE_PC:
5631 bx_write_pc (regs, val);
5632 break;
5633
5634 case LOAD_WRITE_PC:
5635 load_write_pc (regs, dsc, val);
5636 break;
5637
5638 case ALU_WRITE_PC:
5639 alu_write_pc (regs, dsc, val);
5640 break;
5641
5642 case CANNOT_WRITE_PC:
5643 warning (_("Instruction wrote to PC in an unexpected way when "
5644 "single-stepping"));
5645 break;
5646
5647 default:
5648 internal_error (_("Invalid argument to displaced_write_reg"));
5649 }
5650
5651 dsc->wrote_to_pc = 1;
5652 }
5653 else
5654 {
5655 displaced_debug_printf ("writing r%d value %.8lx",
5656 regno, (unsigned long) val);
5657 regcache_cooked_write_unsigned (regs, regno, val);
5658 }
5659 }
5660
5661 /* This function is used to concisely determine if an instruction INSN
5662 references PC. Register fields of interest in INSN should have the
5663 corresponding fields of BITMASK set to 0b1111. The function
5664 returns 1 if any of these fields in INSN reference the PC
5665 (also 0b1111, r15), else it returns 0. */
5666
5667 static int
5668 insn_references_pc (uint32_t insn, uint32_t bitmask)
5669 {
5670 uint32_t lowbit = 1;
5671
5672 while (bitmask != 0)
5673 {
5674 uint32_t mask;
5675
5676 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5677 ;
5678
5679 if (!lowbit)
5680 break;
5681
5682 mask = lowbit * 0xf;
5683
5684 if ((insn & mask) == mask)
5685 return 1;
5686
5687 bitmask &= ~mask;
5688 }
5689
5690 return 0;
5691 }
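
/* Illustrative sketch, never compiled (hypothetical helper): BITMASK
   selects which four-bit register fields of INSN to test.  For an ARM
   data-processing instruction, Rd occupies bits 12-15 and Rn bits 16-19,
   so a caller interested in both passes 0x000ff000.  */
#if 0
static void
insn_references_pc_example (void)
{
  /* "mov pc, r0" (0xe1a0f000): Rd is r15, so the PC is referenced.  */
  gdb_assert (insn_references_pc (0xe1a0f000, 0x000ff000ul) == 1);

  /* "add r1, r2, r3" (0xe0821003): neither Rd nor Rn is r15.  */
  gdb_assert (insn_references_pc (0xe0821003, 0x000ff000ul) == 0);
}
#endif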
5692
5693 /* The simplest copy function. Many instructions have the same effect no
5694 matter what address they are executed at: in those cases, use this. */
5695
5696 static int
5697 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn, const char *iname,
5698 arm_displaced_step_copy_insn_closure *dsc)
5699 {
5700 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
5701 (unsigned long) insn, iname);
5702
5703 dsc->modinsn[0] = insn;
5704
5705 return 0;
5706 }
5707
5708 static int
5709 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5710 uint16_t insn2, const char *iname,
5711 arm_displaced_step_copy_insn_closure *dsc)
5712 {
5713 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
5714 "unmodified", insn1, insn2, iname);
5715
5716 dsc->modinsn[0] = insn1;
5717 dsc->modinsn[1] = insn2;
5718 dsc->numinsns = 2;
5719
5720 return 0;
5721 }
5722
5723 /* Copy a 16-bit Thumb (plain Thumb or 16-bit Thumb-2) instruction without any
5724 modification. */
5725 static int
5726 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
5727 const char *iname,
5728 arm_displaced_step_copy_insn_closure *dsc)
5729 {
5730 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
5731 insn, iname);
5732
5733 dsc->modinsn[0] = insn;
5734
5735 return 0;
5736 }
5737
5738 /* Preload instructions with immediate offset. */
5739
5740 static void
5741 cleanup_preload (struct gdbarch *gdbarch, regcache *regs,
5742 arm_displaced_step_copy_insn_closure *dsc)
5743 {
5744 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5745 if (!dsc->u.preload.immed)
5746 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5747 }
5748
5749 static void
5750 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5751 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn)
5752 {
5753 ULONGEST rn_val;
5754 /* Preload instructions:
5755
5756 {pli/pld} [rn, #+/-imm]
5757 ->
5758 {pli/pld} [r0, #+/-imm]. */
5759
5760 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5761 rn_val = displaced_read_reg (regs, dsc, rn);
5762 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5763 dsc->u.preload.immed = 1;
5764
5765 dsc->cleanup = &cleanup_preload;
5766 }
5767
5768 static int
5769 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5770 arm_displaced_step_copy_insn_closure *dsc)
5771 {
5772 unsigned int rn = bits (insn, 16, 19);
5773
5774 if (!insn_references_pc (insn, 0x000f0000ul))
5775 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5776
5777 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn);
5778
5779 dsc->modinsn[0] = insn & 0xfff0ffff;
5780
5781 install_preload (gdbarch, regs, dsc, rn);
5782
5783 return 0;
5784 }
5785
5786 static int
5787 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5788 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5789 {
5790 unsigned int rn = bits (insn1, 0, 3);
5791 unsigned int u_bit = bit (insn1, 7);
5792 int imm12 = bits (insn2, 0, 11);
5793 ULONGEST pc_val;
5794
5795 if (rn != ARM_PC_REGNUM)
5796 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5797
5798 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
5799 PLD (literal) Encoding T1. */
5800 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
5801 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5802 imm12);
5803
5804 if (!u_bit)
5805 imm12 = -1 * imm12;
5806
5807 /* Rewrite instruction {pli/pld} PC imm12 into:
5808 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5809
5810 {pli/pld} [r0, r1]
5811
5812 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5813
5814 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5815 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5816
5817 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5818
5819 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5820 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5821 dsc->u.preload.immed = 0;
5822
5823 /* {pli/pld} [r0, r1] */
5824 dsc->modinsn[0] = insn1 & 0xfff0;
5825 dsc->modinsn[1] = 0xf001;
5826 dsc->numinsns = 2;
5827
5828 dsc->cleanup = &cleanup_preload;
5829 return 0;
5830 }
5831
5832 /* Preload instructions with register offset. */
5833
5834 static void
5835 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5836 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn,
5837 unsigned int rm)
5838 {
5839 ULONGEST rn_val, rm_val;
5840
5841 /* Preload register-offset instructions:
5842
5843 {pli/pld} [rn, rm {, shift}]
5844 ->
5845 {pli/pld} [r0, r1 {, shift}]. */
5846
5847 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5848 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5849 rn_val = displaced_read_reg (regs, dsc, rn);
5850 rm_val = displaced_read_reg (regs, dsc, rm);
5851 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5852 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5853 dsc->u.preload.immed = 0;
5854
5855 dsc->cleanup = &cleanup_preload;
5856 }
5857
5858 static int
5859 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5860 struct regcache *regs,
5861 arm_displaced_step_copy_insn_closure *dsc)
5862 {
5863 unsigned int rn = bits (insn, 16, 19);
5864 unsigned int rm = bits (insn, 0, 3);
5865
5866
5867 if (!insn_references_pc (insn, 0x000f000ful))
5868 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5869
5870 displaced_debug_printf ("copying preload insn %.8lx",
5871 (unsigned long) insn);
5872
5873 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5874
5875 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5876 return 0;
5877 }
5878
5879 /* Copy/cleanup coprocessor load and store instructions. */
5880
5881 static void
5882 cleanup_copro_load_store (struct gdbarch *gdbarch,
5883 struct regcache *regs,
5884 arm_displaced_step_copy_insn_closure *dsc)
5885 {
5886 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5887
5888 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5889
5890 if (dsc->u.ldst.writeback)
5891 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5892 }
5893
5894 static void
5895 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5896 arm_displaced_step_copy_insn_closure *dsc,
5897 int writeback, unsigned int rn)
5898 {
5899 ULONGEST rn_val;
5900
5901 /* Coprocessor load/store instructions:
5902
5903 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5904 ->
5905 {stc/stc2} [r0, #+/-imm].
5906
5907 ldc/ldc2 are handled identically. */
5908
5909 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5910 rn_val = displaced_read_reg (regs, dsc, rn);
5911 /* PC should be 4-byte aligned. */
5912 rn_val = rn_val & 0xfffffffc;
5913 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5914
5915 dsc->u.ldst.writeback = writeback;
5916 dsc->u.ldst.rn = rn;
5917
5918 dsc->cleanup = &cleanup_copro_load_store;
5919 }
5920
5921 static int
5922 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5923 struct regcache *regs,
5924 arm_displaced_step_copy_insn_closure *dsc)
5925 {
5926 unsigned int rn = bits (insn, 16, 19);
5927
5928 if (!insn_references_pc (insn, 0x000f0000ul))
5929 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5930
5931 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
5932 (unsigned long) insn);
5933
5934 dsc->modinsn[0] = insn & 0xfff0ffff;
5935
5936 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5937
5938 return 0;
5939 }
5940
5941 static int
5942 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5943 uint16_t insn2, struct regcache *regs,
5944 arm_displaced_step_copy_insn_closure *dsc)
5945 {
5946 unsigned int rn = bits (insn1, 0, 3);
5947
5948 if (rn != ARM_PC_REGNUM)
5949 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5950 "copro load/store", dsc);
5951
5952 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
5953 insn1, insn2);
5954
5955 dsc->modinsn[0] = insn1 & 0xfff0;
5956 dsc->modinsn[1] = insn2;
5957 dsc->numinsns = 2;
5958
5959 /* This function is called to copy LDC/LDC2/VLDR instructions, which
5960 do not support writeback, so pass 0. */
5961 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5962
5963 return 0;
5964 }
5965
5966 /* Clean up branch instructions (actually perform the branch, by setting
5967 PC). */
5968
5969 static void
5970 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5971 arm_displaced_step_copy_insn_closure *dsc)
5972 {
5973 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5974 int branch_taken = condition_true (dsc->u.branch.cond, status);
5975 enum pc_write_style write_pc = dsc->u.branch.exchange
5976 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5977
5978 if (!branch_taken)
5979 return;
5980
5981 if (dsc->u.branch.link)
5982 {
5983 /* LR should hold the address of the instruction following the current
5984 one.  So as not to confuse the logic that later handles `bx lr', set
5985 bit 0 of the LR value when the current instruction is Thumb. */
5986 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5987
5988 if (dsc->is_thumb)
5989 next_insn_addr |= 0x1;
5990
5991 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5992 CANNOT_WRITE_PC);
5993 }
5994
5995 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5996 }
5997
5998 /* Copy B/BL/BLX instructions with immediate destinations. */
5999
6000 static void
6001 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6002 arm_displaced_step_copy_insn_closure *dsc,
6003 unsigned int cond, int exchange, int link, long offset)
6004 {
6005 /* Implement "BL<cond> <label>" as:
6006
6007 Preparation: cond <- instruction condition
6008 Insn: mov r0, r0 (nop)
6009 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6010
6011 B<cond> similar, but don't set r14 in cleanup. */
6012
6013 dsc->u.branch.cond = cond;
6014 dsc->u.branch.link = link;
6015 dsc->u.branch.exchange = exchange;
6016
6017 dsc->u.branch.dest = dsc->insn_addr;
6018 if (link && exchange)
6019 /* For BLX, the offset is computed from Align (PC, 4). */
6020 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6021
6022 if (dsc->is_thumb)
6023 dsc->u.branch.dest += 4 + offset;
6024 else
6025 dsc->u.branch.dest += 8 + offset;
6026
6027 dsc->cleanup = &cleanup_branch;
6028 }

6029 static int
6030 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6031 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6032 {
6033 unsigned int cond = bits (insn, 28, 31);
6034 int exchange = (cond == 0xf);
6035 int link = exchange || bit (insn, 24);
6036 long offset;
6037
6038 displaced_debug_printf ("copying %s immediate insn %.8lx",
6039 (exchange) ? "blx" : (link) ? "bl" : "b",
6040 (unsigned long) insn);
6041 if (exchange)
6042 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6043 then arrange the switch into Thumb mode. */
6044 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6045 else
6046 offset = bits (insn, 0, 23) << 2;
6047
6048 if (bit (offset, 25))
6049 offset = offset | ~0x3ffffff;
6050
6051 dsc->modinsn[0] = ARM_NOP;
6052
6053 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6054 return 0;
6055 }
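
/* Worked example (illustrative, with made-up addresses): an ARM "bl" at
   0x1000 whose 24-bit immediate is 6 yields offset = 6 << 2 = 0x18.
   install_b_bl_blx records link = 1, exchange = 0 and
   dest = 0x1000 + 8 + 0x18 = 0x1020.  The NOP is single-stepped in the
   scratch space, and cleanup_branch then writes LR = 0x1004 and
   PC = 0x1020, provided the condition passed.  */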
6056
6057 static int
6058 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6059 uint16_t insn2, struct regcache *regs,
6060 arm_displaced_step_copy_insn_closure *dsc)
6061 {
6062 int link = bit (insn2, 14);
6063 int exchange = link && !bit (insn2, 12);
6064 int cond = INST_AL;
6065 long offset = 0;
6066 int j1 = bit (insn2, 13);
6067 int j2 = bit (insn2, 11);
6068 int s = sbits (insn1, 10, 10);
6069 int i1 = !(j1 ^ bit (insn1, 10));
6070 int i2 = !(j2 ^ bit (insn1, 10));
6071
6072 if (!link && !exchange) /* B */
6073 {
6074 offset = (bits (insn2, 0, 10) << 1);
6075 if (bit (insn2, 12)) /* Encoding T4 */
6076 {
6077 offset |= (bits (insn1, 0, 9) << 12)
6078 | (i2 << 22)
6079 | (i1 << 23)
6080 | (s << 24);
6081 cond = INST_AL;
6082 }
6083 else /* Encoding T3 */
6084 {
6085 offset |= (bits (insn1, 0, 5) << 12)
6086 | (j1 << 18)
6087 | (j2 << 19)
6088 | (s << 20);
6089 cond = bits (insn1, 6, 9);
6090 }
6091 }
6092 else
6093 {
6094 offset = (bits (insn1, 0, 9) << 12);
6095 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6096 offset |= exchange ?
6097 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6098 }
6099
6100 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
6101 link ? (exchange) ? "blx" : "bl" : "b",
6102 insn1, insn2, offset);
6103
6104 dsc->modinsn[0] = THUMB_NOP;
6105
6106 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6107 return 0;
6108 }
6109
6110 /* Copy B Thumb instructions. */
6111 static int
6112 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
6113 arm_displaced_step_copy_insn_closure *dsc)
6114 {
6115 unsigned int cond = 0;
6116 int offset = 0;
6117 unsigned short bit_12_15 = bits (insn, 12, 15);
6118 CORE_ADDR from = dsc->insn_addr;
6119
6120 if (bit_12_15 == 0xd)
6121 {
6122 /* offset = SignExtend (imm8:0, 32) */
6123 offset = sbits ((insn << 1), 0, 8);
6124 cond = bits (insn, 8, 11);
6125 }
6126 else if (bit_12_15 == 0xe) /* Encoding T2 */
6127 {
6128 offset = sbits ((insn << 1), 0, 11);
6129 cond = INST_AL;
6130 }
6131
6132 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
6133 insn, offset);
6134
6135 dsc->u.branch.cond = cond;
6136 dsc->u.branch.link = 0;
6137 dsc->u.branch.exchange = 0;
6138 dsc->u.branch.dest = from + 4 + offset;
6139
6140 dsc->modinsn[0] = THUMB_NOP;
6141
6142 dsc->cleanup = &cleanup_branch;
6143
6144 return 0;
6145 }
6146
6147 /* Copy BX/BLX with register-specified destinations. */
6148
6149 static void
6150 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6151 arm_displaced_step_copy_insn_closure *dsc, int link,
6152 unsigned int cond, unsigned int rm)
6153 {
6154 /* Implement "{BX,BLX}<cond> <reg>" as:
6155
6156 Preparation: cond <- instruction condition
6157 Insn: mov r0, r0 (nop)
6158 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6159
6160 Don't set r14 in cleanup for BX. */
6161
6162 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6163
6164 dsc->u.branch.cond = cond;
6165 dsc->u.branch.link = link;
6166
6167 dsc->u.branch.exchange = 1;
6168
6169 dsc->cleanup = &cleanup_branch;
6170 }
6171
6172 static int
6173 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6174 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6175 {
6176 unsigned int cond = bits (insn, 28, 31);
6177 /* BX: x12xxx1x
6178 BLX: x12xxx3x. */
6179 int link = bit (insn, 5);
6180 unsigned int rm = bits (insn, 0, 3);
6181
6182 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn);
6183
6184 dsc->modinsn[0] = ARM_NOP;
6185
6186 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6187 return 0;
6188 }
6189
6190 static int
6191 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6192 struct regcache *regs,
6193 arm_displaced_step_copy_insn_closure *dsc)
6194 {
6195 int link = bit (insn, 7);
6196 unsigned int rm = bits (insn, 3, 6);
6197
6198 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn);
6199
6200 dsc->modinsn[0] = THUMB_NOP;
6201
6202 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6203
6204 return 0;
6205 }
6206
6207
6208 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6209
6210 static void
6211 cleanup_alu_imm (struct gdbarch *gdbarch,
6212 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6213 {
6214 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6215 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6216 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6217 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6218 }
6219
6220 static int
6221 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6222 arm_displaced_step_copy_insn_closure *dsc)
6223 {
6224 unsigned int rn = bits (insn, 16, 19);
6225 unsigned int rd = bits (insn, 12, 15);
6226 unsigned int op = bits (insn, 21, 24);
6227 int is_mov = (op == 0xd);
6228 ULONGEST rd_val, rn_val;
6229
6230 if (!insn_references_pc (insn, 0x000ff000ul))
6231 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6232
6233 displaced_debug_printf ("copying immediate %s insn %.8lx",
6234 is_mov ? "move" : "ALU",
6235 (unsigned long) insn);
6236
6237 /* Instruction is of form:
6238
6239 <op><cond> rd, [rn,] #imm
6240
6241 Rewrite as:
6242
6243 Preparation: tmp1, tmp2 <- r0, r1;
6244 r0, r1 <- rd, rn
6245 Insn: <op><cond> r0, r1, #imm
6246 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6247 */
6248
6249 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6250 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6251 rn_val = displaced_read_reg (regs, dsc, rn);
6252 rd_val = displaced_read_reg (regs, dsc, rd);
6253 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6254 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6255 dsc->rd = rd;
6256
6257 if (is_mov)
6258 dsc->modinsn[0] = insn & 0xfff00fff;
6259 else
6260 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6261
6262 dsc->cleanup = &cleanup_alu_imm;
6263
6264 return 0;
6265 }
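
/* Worked example (illustrative, with made-up addresses): "add r1, pc, #4"
   (0xe28f1004) at 0x1000 is rewritten to "add r0, r1, #4" (0xe2810004).
   Before execution, r1 is loaded with the PC value the original insn
   would have seen (0x1000 + 8); afterwards cleanup_alu_imm copies r0
   into r1 and restores the scratch registers, leaving r1 == 0x100c.  */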
6266
6267 static int
6268 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6269 uint16_t insn2, struct regcache *regs,
6270 arm_displaced_step_copy_insn_closure *dsc)
6271 {
6272 unsigned int op = bits (insn1, 5, 8);
6273 unsigned int rn, rm, rd;
6274 ULONGEST rd_val, rn_val;
6275
6276 rn = bits (insn1, 0, 3); /* Rn */
6277 rm = bits (insn2, 0, 3); /* Rm */
6278 rd = bits (insn2, 8, 11); /* Rd */
6279
6280 /* This routine is only called for instruction MOV. */
6281 gdb_assert (op == 0x2 && rn == 0xf);
6282
6283 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6284 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6285
6286 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1, insn2);
6287
6288 /* Instruction is of form:
6289
6290 <op><cond> rd, [rn,] #imm
6291
6292 Rewrite as:
6293
6294 Preparation: tmp1, tmp2 <- r0, r1;
6295 r0, r1 <- rd, rn
6296 Insn: <op><cond> r0, r1, #imm
6297 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6298 */
6299
6300 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6301 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6302 rn_val = displaced_read_reg (regs, dsc, rn);
6303 rd_val = displaced_read_reg (regs, dsc, rd);
6304 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6305 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6306 dsc->rd = rd;
6307
6308 dsc->modinsn[0] = insn1;
6309 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6310 dsc->numinsns = 2;
6311
6312 dsc->cleanup = &cleanup_alu_imm;
6313
6314 return 0;
6315 }
6316
6317 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6318
6319 static void
6320 cleanup_alu_reg (struct gdbarch *gdbarch,
6321 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6322 {
6323 ULONGEST rd_val;
6324 int i;
6325
6326 rd_val = displaced_read_reg (regs, dsc, 0);
6327
6328 for (i = 0; i < 3; i++)
6329 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6330
6331 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6332 }
6333
6334 static void
6335 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6336 arm_displaced_step_copy_insn_closure *dsc,
6337 unsigned int rd, unsigned int rn, unsigned int rm)
6338 {
6339 ULONGEST rd_val, rn_val, rm_val;
6340
6341 /* Instruction is of form:
6342
6343 <op><cond> rd, [rn,] rm [, <shift>]
6344
6345 Rewrite as:
6346
6347 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6348 r0, r1, r2 <- rd, rn, rm
6349 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
6350 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6351 */
6352
6353 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6354 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6355 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6356 rd_val = displaced_read_reg (regs, dsc, rd);
6357 rn_val = displaced_read_reg (regs, dsc, rn);
6358 rm_val = displaced_read_reg (regs, dsc, rm);
6359 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6360 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6361 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6362 dsc->rd = rd;
6363
6364 dsc->cleanup = &cleanup_alu_reg;
6365 }
6366
6367 static int
6368 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6369 arm_displaced_step_copy_insn_closure *dsc)
6370 {
6371 unsigned int op = bits (insn, 21, 24);
6372 int is_mov = (op == 0xd);
6373
6374 if (!insn_references_pc (insn, 0x000ff00ful))
6375 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6376
6377 displaced_debug_printf ("copying reg %s insn %.8lx",
6378 is_mov ? "move" : "ALU", (unsigned long) insn);
6379
6380 if (is_mov)
6381 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6382 else
6383 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6384
6385 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6386 bits (insn, 0, 3));
6387 return 0;
6388 }
6389
6390 static int
6391 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6392 struct regcache *regs,
6393 arm_displaced_step_copy_insn_closure *dsc)
6394 {
6395 unsigned rm, rd;
6396
6397 rm = bits (insn, 3, 6);
6398 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6399
6400 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
6401 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6402
6403 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn);
6404
6405 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
6406
6407 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
6408
6409 return 0;
6410 }
6411
6412 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6413
6414 static void
6415 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6416 struct regcache *regs,
6417 arm_displaced_step_copy_insn_closure *dsc)
6418 {
6419 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6420 int i;
6421
6422 for (i = 0; i < 4; i++)
6423 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6424
6425 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6426 }
6427
6428 static void
6429 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6430 arm_displaced_step_copy_insn_closure *dsc,
6431 unsigned int rd, unsigned int rn, unsigned int rm,
6432 unsigned rs)
6433 {
6434 int i;
6435 ULONGEST rd_val, rn_val, rm_val, rs_val;
6436
6437 /* Instruction is of form:
6438
6439 <op><cond> rd, [rn,] rm, <shift> rs
6440
6441 Rewrite as:
6442
6443 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6444 r0, r1, r2, r3 <- rd, rn, rm, rs
6445 Insn: <op><cond> r0, r1, r2, <shift> r3
6446 Cleanup: tmp5 <- r0
6447 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6448 rd <- tmp5
6449 */
6450
6451 for (i = 0; i < 4; i++)
6452 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6453
6454 rd_val = displaced_read_reg (regs, dsc, rd);
6455 rn_val = displaced_read_reg (regs, dsc, rn);
6456 rm_val = displaced_read_reg (regs, dsc, rm);
6457 rs_val = displaced_read_reg (regs, dsc, rs);
6458 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6459 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6460 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6461 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6462 dsc->rd = rd;
6463 dsc->cleanup = &cleanup_alu_shifted_reg;
6464 }
6465
6466 static int
6467 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6468 struct regcache *regs,
6469 arm_displaced_step_copy_insn_closure *dsc)
6470 {
6471 unsigned int op = bits (insn, 21, 24);
6472 int is_mov = (op == 0xd);
6473 unsigned int rd, rn, rm, rs;
6474
6475 if (!insn_references_pc (insn, 0x000fff0ful))
6476 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6477
6478 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
6479 is_mov ? "move" : "ALU",
6480 (unsigned long) insn);
6481
6482 rn = bits (insn, 16, 19);
6483 rm = bits (insn, 0, 3);
6484 rs = bits (insn, 8, 11);
6485 rd = bits (insn, 12, 15);
6486
6487 if (is_mov)
6488 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6489 else
6490 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6491
6492 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6493
6494 return 0;
6495 }
6496
6497 /* Clean up load instructions. */
6498
6499 static void
6500 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6501 arm_displaced_step_copy_insn_closure *dsc)
6502 {
6503 ULONGEST rt_val, rt_val2 = 0, rn_val;
6504
6505 rt_val = displaced_read_reg (regs, dsc, 0);
6506 if (dsc->u.ldst.xfersize == 8)
6507 rt_val2 = displaced_read_reg (regs, dsc, 1);
6508 rn_val = displaced_read_reg (regs, dsc, 2);
6509
6510 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6511 if (dsc->u.ldst.xfersize > 4)
6512 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6513 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6514 if (!dsc->u.ldst.immed)
6515 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6516
6517 /* Handle register writeback. */
6518 if (dsc->u.ldst.writeback)
6519 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6520 /* Put result in right place. */
6521 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6522 if (dsc->u.ldst.xfersize == 8)
6523 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6524 }
6525
6526 /* Clean up store instructions. */
6527
6528 static void
6529 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6530 arm_displaced_step_copy_insn_closure *dsc)
6531 {
6532 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6533
6534 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6535 if (dsc->u.ldst.xfersize > 4)
6536 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6537 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6538 if (!dsc->u.ldst.immed)
6539 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6540 if (!dsc->u.ldst.restore_r4)
6541 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6542
6543 /* Writeback. */
6544 if (dsc->u.ldst.writeback)
6545 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6546 }
6547
6548 /* Copy "extra" load/store instructions. These are halfword/doubleword
6549 transfers, which have a different encoding to byte/word transfers. */
6550
6551 static int
6552 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6553 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6554 {
6555 unsigned int op1 = bits (insn, 20, 24);
6556 unsigned int op2 = bits (insn, 5, 6);
6557 unsigned int rt = bits (insn, 12, 15);
6558 unsigned int rn = bits (insn, 16, 19);
6559 unsigned int rm = bits (insn, 0, 3);
6560 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6561 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6562 int immed = (op1 & 0x4) != 0;
6563 int opcode;
6564 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6565
6566 if (!insn_references_pc (insn, 0x000ff00ful))
6567 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6568
6569 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
6570 unprivileged ? "unprivileged " : "",
6571 (unsigned long) insn);
6572
6573 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6574
6575 if (opcode < 0)
6576 internal_error (_("copy_extra_ld_st: instruction decode error"));
6577
6578 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6579 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6580 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6581 if (!immed)
6582 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6583
6584 rt_val = displaced_read_reg (regs, dsc, rt);
6585 if (bytesize[opcode] == 8)
6586 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6587 rn_val = displaced_read_reg (regs, dsc, rn);
6588 if (!immed)
6589 rm_val = displaced_read_reg (regs, dsc, rm);
6590
6591 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6592 if (bytesize[opcode] == 8)
6593 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6594 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6595 if (!immed)
6596 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6597
6598 dsc->rd = rt;
6599 dsc->u.ldst.xfersize = bytesize[opcode];
6600 dsc->u.ldst.rn = rn;
6601 dsc->u.ldst.immed = immed;
6602 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6603 dsc->u.ldst.restore_r4 = 0;
6604
6605 if (immed)
6606 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6607 ->
6608 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6609 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6610 else
6611 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6612 ->
6613 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6614 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6615
6616 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6617
6618 return 0;
6619 }
6620
6621 /* Copy byte/half word/word loads and stores. */
6622
6623 static void
6624 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6625 arm_displaced_step_copy_insn_closure *dsc, int load,
6626 int immed, int writeback, int size, int usermode,
6627 int rt, int rm, int rn)
6628 {
6629 ULONGEST rt_val, rn_val, rm_val = 0;
6630
6631 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6632 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6633 if (!immed)
6634 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6635 if (!load)
6636 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6637
6638 rt_val = displaced_read_reg (regs, dsc, rt);
6639 rn_val = displaced_read_reg (regs, dsc, rn);
6640 if (!immed)
6641 rm_val = displaced_read_reg (regs, dsc, rm);
6642
6643 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6644 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6645 if (!immed)
6646 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6647 dsc->rd = rt;
6648 dsc->u.ldst.xfersize = size;
6649 dsc->u.ldst.rn = rn;
6650 dsc->u.ldst.immed = immed;
6651 dsc->u.ldst.writeback = writeback;
6652
6653 /* To write PC we can do:
6654
6655 Before this sequence of instructions:
6656 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
6657 r2 is the Rn value obtained from displaced_read_reg.
6658
6659 Insn1: push {pc} Write address of STR instruction + offset on stack
6660 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6661 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6662 = addr(Insn1) + offset - addr(Insn3) - 8
6663 = offset - 16
6664 Insn4: add r4, r4, #8 r4 = offset - 8
6665 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6666 = from + offset
6667 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6668
6669 Otherwise we don't know what value to write for PC, since the offset is
6670 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6671 of this can be found in Section "Saving from r15" in
6672 https://developer.arm.com/documentation/dui0204/g/ */
6673
6674 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6675 }
6676
6677
6678 static int
6679 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6680 uint16_t insn2, struct regcache *regs,
6681 arm_displaced_step_copy_insn_closure *dsc, int size)
6682 {
6683 unsigned int u_bit = bit (insn1, 7);
6684 unsigned int rt = bits (insn2, 12, 15);
6685 int imm12 = bits (insn2, 0, 11);
6686 ULONGEST pc_val;
6687
6688 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
6689 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6690 imm12);
6691
6692 if (!u_bit)
6693 imm12 = -1 * imm12;
6694
6695 /* Rewrite instruction LDR Rt imm12 into:
6696
6697 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6698
6699 LDR R0, R2, R3,
6700
6701 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6702
6703
6704 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6705 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6706 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6707
6708 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6709
6710 pc_val = pc_val & 0xfffffffc;
6711
6712 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6713 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6714
6715 dsc->rd = rt;
6716
6717 dsc->u.ldst.xfersize = size;
6718 dsc->u.ldst.immed = 0;
6719 dsc->u.ldst.writeback = 0;
6720 dsc->u.ldst.restore_r4 = 0;
6721
6722 /* LDR R0, R2, R3 */
6723 dsc->modinsn[0] = 0xf852;
6724 dsc->modinsn[1] = 0x3;
6725 dsc->numinsns = 2;
6726
6727 dsc->cleanup = &cleanup_load;
6728
6729 return 0;
6730 }
6731
6732 static int
6733 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6734 uint16_t insn2, struct regcache *regs,
6735 arm_displaced_step_copy_insn_closure *dsc,
6736 int writeback, int immed)
6737 {
6738 unsigned int rt = bits (insn2, 12, 15);
6739 unsigned int rn = bits (insn1, 0, 3);
6740 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6741 /* In LDR (register), there is also a register Rm, which is not allowed to
6742 be PC, so we don't have to check it. */
6743
6744 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6745 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6746 dsc);
6747
6748 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
6749 rt, rn, insn1, insn2);
6750
6751 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6752 0, rt, rm, rn);
6753
6754 dsc->u.ldst.restore_r4 = 0;
6755
6756 if (immed)
6757 /* ldr[b]<cond> rt, [rn, #imm], etc.
6758 ->
6759 ldr[b]<cond> r0, [r2, #imm]. */
6760 {
6761 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6762 dsc->modinsn[1] = insn2 & 0x0fff;
6763 }
6764 else
6765 /* ldr[b]<cond> rt, [rn, rm], etc.
6766 ->
6767 ldr[b]<cond> r0, [r2, r3]. */
6768 {
6769 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6770 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6771 }
6772
6773 dsc->numinsns = 2;
6774
6775 return 0;
6776 }
6777
6778
6779 static int
6780 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6781 struct regcache *regs,
6782 arm_displaced_step_copy_insn_closure *dsc,
6783 int load, int size, int usermode)
6784 {
6785 int immed = !bit (insn, 25);
6786 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6787 unsigned int rt = bits (insn, 12, 15);
6788 unsigned int rn = bits (insn, 16, 19);
6789 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6790
6791 if (!insn_references_pc (insn, 0x000ff00ful))
6792 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6793
6794 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
6795 load ? (size == 1 ? "ldrb" : "ldr")
6796 : (size == 1 ? "strb" : "str"),
6797 usermode ? "t" : "",
6798 rt, rn,
6799 (unsigned long) insn);
6800
6801 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6802 usermode, rt, rm, rn);
6803
6804 if (load || rt != ARM_PC_REGNUM)
6805 {
6806 dsc->u.ldst.restore_r4 = 0;
6807
6808 if (immed)
6809 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6810 ->
6811 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6812 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6813 else
6814 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6815 ->
6816 {ldr,str}[b]<cond> r0, [r2, r3]. */
6817 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6818 }
6819 else
6820 {
6821 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6822 dsc->u.ldst.restore_r4 = 1;
6823 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6824 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6825 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6826 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6827 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6828
6829 /* As above. */
6830 if (immed)
6831 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6832 else
6833 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6834
6835 dsc->numinsns = 6;
6836 }
6837
6838 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6839
6840 return 0;
6841 }
6842
6843 /* Cleanup LDM instructions with fully-populated register list. This is an
6844 unfortunate corner case: it's impossible to implement correctly by modifying
6845 the instruction. The issue is as follows: we have an instruction,
6846
6847 ldm rN, {r0-r15}
6848
6849 which we must rewrite to avoid loading PC. A possible solution would be to
6850 do the load in two halves, something like (with suitable cleanup
6851 afterwards):
6852
6853 mov r8, rN
6854 ldm[id][ab] r8!, {r0-r7}
6855 str r7, <temp>
6856 ldm[id][ab] r8, {r7-r14}
6857 <bkpt>
6858
6859 but at present there's no suitable place for <temp>, since the scratch space
6860 is overwritten before the cleanup routine is called. For now, we simply
6861 emulate the instruction. */
6862
6863 static void
6864 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6865 arm_displaced_step_copy_insn_closure *dsc)
6866 {
6867 int inc = dsc->u.block.increment;
6868 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6869 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6870 uint32_t regmask = dsc->u.block.regmask;
6871 int regno = inc ? 0 : 15;
6872 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6873 int exception_return = dsc->u.block.load && dsc->u.block.user
6874 && (regmask & 0x8000) != 0;
6875 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6876 int do_transfer = condition_true (dsc->u.block.cond, status);
6877 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6878
6879 if (!do_transfer)
6880 return;
6881
6882 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6883 sensible we can do here. Complain loudly. */
6884 if (exception_return)
6885 error (_("Cannot single-step exception return"));
6886
6887 /* We don't handle any stores here for now. */
6888 gdb_assert (dsc->u.block.load != 0);
6889
6890 displaced_debug_printf ("emulating block transfer: %s %s %s",
6891 dsc->u.block.load ? "ldm" : "stm",
6892 dsc->u.block.increment ? "inc" : "dec",
6893 dsc->u.block.before ? "before" : "after");
6894
6895 while (regmask)
6896 {
6897 uint32_t memword;
6898
6899 if (inc)
6900 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6901 regno++;
6902 else
6903 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6904 regno--;
6905
6906 xfer_addr += bump_before;
6907
6908 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6909 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6910
6911 xfer_addr += bump_after;
6912
6913 regmask &= ~(1 << regno);
6914 }
6915
6916 if (dsc->u.block.writeback)
6917 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6918 CANNOT_WRITE_PC);
6919 }
6920
6921 /* Clean up an STM which included the PC in the register list. */
6922
6923 static void
6924 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6925 arm_displaced_step_copy_insn_closure *dsc)
6926 {
6927 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6928 int store_executed = condition_true (dsc->u.block.cond, status);
6929 CORE_ADDR pc_stored_at, transferred_regs
6930 = count_one_bits (dsc->u.block.regmask);
6931 CORE_ADDR stm_insn_addr;
6932 uint32_t pc_val;
6933 long offset;
6934 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6935
6936 /* If condition code fails, there's nothing else to do. */
6937 if (!store_executed)
6938 return;
6939
6940 if (dsc->u.block.increment)
6941 {
6942 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6943
6944 if (dsc->u.block.before)
6945 pc_stored_at += 4;
6946 }
6947 else
6948 {
6949 pc_stored_at = dsc->u.block.xfer_addr;
6950
6951 if (dsc->u.block.before)
6952 pc_stored_at -= 4;
6953 }
6954
6955 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6956 stm_insn_addr = dsc->scratch_base;
6957 offset = pc_val - stm_insn_addr;
6958
6959 displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
6960 offset);
6961
6962 /* Rewrite the stored PC to the proper value for the non-displaced original
6963 instruction. */
6964 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6965 dsc->insn_addr + offset);
6966 }
6967
6968 /* Clean up an LDM which includes the PC in the register list. We clumped all
6969 the registers in the transferred list into a contiguous range r0...rX (to
6970 avoid loading PC directly and losing control of the debugged program), so we
6971 must undo that here. */
6972
6973 static void
6974 cleanup_block_load_pc (struct gdbarch *gdbarch,
6975 struct regcache *regs,
6976 arm_displaced_step_copy_insn_closure *dsc)
6977 {
6978 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6979 int load_executed = condition_true (dsc->u.block.cond, status);
6980 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6981 unsigned int regs_loaded = count_one_bits (mask);
6982 unsigned int num_to_shuffle = regs_loaded, clobbered;
6983
6984 /* The method employed here will fail if the register list is fully populated
6985 (we need to avoid loading PC directly). */
6986 gdb_assert (num_to_shuffle < 16);
6987
6988 if (!load_executed)
6989 return;
6990
6991 clobbered = (1 << num_to_shuffle) - 1;
6992
6993 while (num_to_shuffle > 0)
6994 {
6995 if ((mask & (1 << write_reg)) != 0)
6996 {
6997 unsigned int read_reg = num_to_shuffle - 1;
6998
6999 if (read_reg != write_reg)
7000 {
7001 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7002 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7003 displaced_debug_printf ("LDM: move loaded register r%d to r%d",
7004 read_reg, write_reg);
7005 }
7006 else
7007 displaced_debug_printf ("LDM: register r%d already in the right "
7008 "place", write_reg);
7009
7010 clobbered &= ~(1 << write_reg);
7011
7012 num_to_shuffle--;
7013 }
7014
7015 write_reg--;
7016 }
7017
7018 /* Restore any registers we scribbled over. */
7019 for (write_reg = 0; clobbered != 0; write_reg++)
7020 {
7021 if ((clobbered & (1 << write_reg)) != 0)
7022 {
7023 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7024 CANNOT_WRITE_PC);
7025 displaced_debug_printf ("LDM: restored clobbered register r%d",
7026 write_reg);
7027 clobbered &= ~(1 << write_reg);
7028 }
7029 }
7030
7031 /* Perform register writeback manually. */
7032 if (dsc->u.block.writeback)
7033 {
7034 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7035
7036 if (dsc->u.block.increment)
7037 new_rn_val += regs_loaded * 4;
7038 else
7039 new_rn_val -= regs_loaded * 4;
7040
7041 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7042 CANNOT_WRITE_PC);
7043 }
7044 }
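
/* Worked example (illustrative): "ldm r6!, {r1, r5, pc}" is copied as
   "ldm r6, {r0, r1, r2}" with writeback turned off (see
   arm_copy_block_xfer below).  The cleanup above then moves r2 into the
   PC, r1 into r5 and r0 into r1, restores the clobbered scratch
   registers r0 and r2 from dsc->tmp[], and finally performs the r6
   writeback by hand.  */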
7045
7046 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7047 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7048
7049 static int
7050 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7051 struct regcache *regs,
7052 arm_displaced_step_copy_insn_closure *dsc)
7053 {
7054 int load = bit (insn, 20);
7055 int user = bit (insn, 22);
7056 int increment = bit (insn, 23);
7057 int before = bit (insn, 24);
7058 int writeback = bit (insn, 21);
7059 int rn = bits (insn, 16, 19);
7060
7061 /* Block transfers which don't mention PC can be run directly
7062 out-of-line. */
7063 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7064 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7065
7066 if (rn == ARM_PC_REGNUM)
7067 {
7068 warning (_("displaced: Unpredictable LDM or STM with "
7069 "base register r15"));
7070 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7071 }
7072
7073 displaced_debug_printf ("copying block transfer insn %.8lx",
7074 (unsigned long) insn);
7075
7076 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7077 dsc->u.block.rn = rn;
7078
7079 dsc->u.block.load = load;
7080 dsc->u.block.user = user;
7081 dsc->u.block.increment = increment;
7082 dsc->u.block.before = before;
7083 dsc->u.block.writeback = writeback;
7084 dsc->u.block.cond = bits (insn, 28, 31);
7085
7086 dsc->u.block.regmask = insn & 0xffff;
7087
7088 if (load)
7089 {
7090 if ((insn & 0xffff) == 0xffff)
7091 {
7092 /* LDM with a fully-populated register list. This case is
7093 particularly tricky. Implement for now by fully emulating the
7094 instruction (which might not behave perfectly in all cases, but
7095 these instructions should be rare enough for that not to matter
7096 too much). */
7097 dsc->modinsn[0] = ARM_NOP;
7098
7099 dsc->cleanup = &cleanup_block_load_all;
7100 }
7101 else
7102 {
7103 /* LDM of a list of registers which includes PC. Implement by
7104 rewriting the list of registers to be transferred into a
7105 contiguous chunk r0...rX before doing the transfer, then shuffling
7106 registers into the correct places in the cleanup routine. */
7107 unsigned int regmask = insn & 0xffff;
7108 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
7109 unsigned int i;
7110
7111 for (i = 0; i < num_in_list; i++)
7112 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7113
7114 /* Writeback makes things complicated. We need to avoid clobbering
7115 the base register with one of the registers in our modified
7116 register list, but just using a different register can't work in
7117 all cases, e.g.:
7118
7119 ldm r14!, {r0-r13,pc}
7120
7121 which would need to be rewritten as:
7122
7123 ldm rN!, {r0-r14}
7124
7125 but that can't work, because there's no free register for N.
7126
7127 Solve this by turning off the writeback bit, and emulating
7128 writeback manually in the cleanup routine. */
7129
7130 if (writeback)
7131 insn &= ~(1 << 21);
7132
7133 new_regmask = (1 << num_in_list) - 1;
7134
7135 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
7136 "%.4x, modified list %.4x",
7137 rn, writeback ? "!" : "",
7138 (int) insn & 0xffff, new_regmask);
7139
7140 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7141
7142 dsc->cleanup = &cleanup_block_load_pc;
7143 }
7144 }
7145 else
7146 {
7147 /* STM of a list of registers which includes PC. Run the instruction
7148 as-is, but out of line: this will store the wrong value for the PC,
7149 so we must manually fix up the memory in the cleanup routine.
7150 Doing things this way has the advantage that we can auto-detect
7151 the offset of the PC write (which is architecture-dependent) in
7152 the cleanup routine. */
7153 dsc->modinsn[0] = insn;
7154
7155 dsc->cleanup = &cleanup_block_store_pc;
7156 }
7157
7158 return 0;
7159 }
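/* As a concrete illustration of the above (register choices invented for
the example): for "ldm r6!, {r2, r5, pc}" the original register list is
0x8024 and num_in_list is 3, so new_regmask becomes 0x0007; the copy run
out of line is "ldm r6, {r0, r1, r2}" with the writeback bit cleared.
cleanup_block_load_pc is then expected to move the value loaded into r0
to r2, the value in r1 to r5, write the value in r2 to the PC, emulate
the writeback of r6 by hand, and restore the clobbered low registers
from dsc->tmp[].  */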
7160
7161 static int
7162 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7163 struct regcache *regs,
7164 arm_displaced_step_copy_insn_closure *dsc)
7165 {
7166 int rn = bits (insn1, 0, 3);
7167 int load = bit (insn1, 4);
7168 int writeback = bit (insn1, 5);
7169
7170 /* Block transfers which don't mention PC can be run directly
7171 out-of-line. */
7172 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7173 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7174
7175 if (rn == ARM_PC_REGNUM)
7176 {
7177 warning (_("displaced: Unpredictable LDM or STM with "
7178 "base register r15"));
7179 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7180 "unpredictable ldm/stm", dsc);
7181 }
7182
7183 displaced_debug_printf ("copying block transfer insn %.4x%.4x",
7184 insn1, insn2);
7185
7186 /* Clear bit 13, since it should always be zero. */
7187 dsc->u.block.regmask = (insn2 & 0xdfff);
7188 dsc->u.block.rn = rn;
7189
7190 dsc->u.block.load = load;
7191 dsc->u.block.user = 0;
7192 dsc->u.block.increment = bit (insn1, 7);
7193 dsc->u.block.before = bit (insn1, 8);
7194 dsc->u.block.writeback = writeback;
7195 dsc->u.block.cond = INST_AL;
7196 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7197
7198 if (load)
7199 {
7200 if (dsc->u.block.regmask == 0xffff)
7201 {
7202 /* This cannot happen: bit 13 was cleared above, so the register mask can never be 0xffff. */
7203 gdb_assert (0);
7204 }
7205 else
7206 {
7207 unsigned int regmask = dsc->u.block.regmask;
7208 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
7209 unsigned int i;
7210
7211 for (i = 0; i < num_in_list; i++)
7212 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7213
7214 if (writeback)
7215 insn1 &= ~(1 << 5);
7216
7217 new_regmask = (1 << num_in_list) - 1;
7218
7219 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
7220 "%.4x, modified list %.4x",
7221 rn, writeback ? "!" : "",
7222 (int) dsc->u.block.regmask, new_regmask);
7223
7224 dsc->modinsn[0] = insn1;
7225 dsc->modinsn[1] = (new_regmask & 0xffff);
7226 dsc->numinsns = 2;
7227
7228 dsc->cleanup = &cleanup_block_load_pc;
7229 }
7230 }
7231 else
7232 {
7233 dsc->modinsn[0] = insn1;
7234 dsc->modinsn[1] = insn2;
7235 dsc->numinsns = 2;
7236 dsc->cleanup = &cleanup_block_store_pc;
7237 }
7238 return 0;
7239 }
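/* The same trick works for the Thumb-2 encoding, e.g. (example values):
"ldmia.w r4, {r1, r3, pc}" is insn1 = 0xe894, insn2 = 0x800a.  With
num_in_list = 3 the copied instruction pair becomes 0xe894/0x0007,
i.e. "ldmia.w r4, {r0, r1, r2}", and cleanup_block_load_pc shuffles the
loaded values into r1, r3 and the PC afterwards.  */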
7240
7241 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
7242 This is used to avoid a dependency on BFD's bfd_endian enum. */
7243
7244 ULONGEST
7245 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
7246 int byte_order)
7247 {
7248 return read_memory_unsigned_integer (memaddr, len,
7249 (enum bfd_endian) byte_order);
7250 }
7251
7252 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
7253
7254 CORE_ADDR
7255 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
7256 CORE_ADDR val)
7257 {
7258 return gdbarch_addr_bits_remove
7259 (gdb::checked_static_cast<regcache *> (self->regcache)->arch (), val);
7260 }
7261
7262 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
7263
7264 static CORE_ADDR
7265 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
7266 {
7267 return 0;
7268 }
7269
7270 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
7271
7272 int
7273 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
7274 {
7275 return arm_is_thumb (gdb::checked_static_cast<regcache *> (self->regcache));
7276 }
7277
7278 /* single_step() is called just before we want to resume the inferior,
7279 if we want to single-step it but there is no hardware or kernel
7280 single-step support. We find the possible targets of the next
7281 instruction and set breakpoints on them. */
7282
7283 std::vector<CORE_ADDR>
7284 arm_software_single_step (struct regcache *regcache)
7285 {
7286 struct gdbarch *gdbarch = regcache->arch ();
7287 struct arm_get_next_pcs next_pcs_ctx;
7288
7289 arm_get_next_pcs_ctor (&next_pcs_ctx,
7290 &arm_get_next_pcs_ops,
7291 gdbarch_byte_order (gdbarch),
7292 gdbarch_byte_order_for_code (gdbarch),
7293 0,
7294 regcache);
7295
7296 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7297
7298 for (CORE_ADDR &pc_ref : next_pcs)
7299 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
7300
7301 return next_pcs;
7302 }
7303
7304 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7305 for Linux, where some SVC instructions must be treated specially. */
7306
7307 static void
7308 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7309 arm_displaced_step_copy_insn_closure *dsc)
7310 {
7311 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7312
7313 displaced_debug_printf ("cleanup for svc, resume at %.8lx",
7314 (unsigned long) resume_addr);
7315
7316 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7317 }
7318
7319
7320 /* Common copy routine for svc instruction. */
7321
7322 static int
7323 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7324 arm_displaced_step_copy_insn_closure *dsc)
7325 {
7326 /* Preparation: none.
7327 Insn: unmodified svc.
7328 Cleanup: pc <- insn_addr + insn_size. */
7329
7330 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7331 instruction. */
7332 dsc->wrote_to_pc = 1;
7333
7334 /* Allow OS-specific code to override SVC handling. */
7335 if (dsc->u.svc.copy_svc_os)
7336 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7337 else
7338 {
7339 dsc->cleanup = &cleanup_svc;
7340 return 0;
7341 }
7342 }
7343
7344 static int
7345 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7346 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7347 {
7348
7349 displaced_debug_printf ("copying svc insn %.8lx",
7350 (unsigned long) insn);
7351
7352 dsc->modinsn[0] = insn;
7353
7354 return install_svc (gdbarch, regs, dsc);
7355 }
7356
7357 static int
7358 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7359 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7360 {
7361
7362 displaced_debug_printf ("copying svc insn %.4x", insn);
7363
7364 dsc->modinsn[0] = insn;
7365
7366 return install_svc (gdbarch, regs, dsc);
7367 }
7368
7369 /* Copy undefined instructions. */
7370
7371 static int
7372 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7373 arm_displaced_step_copy_insn_closure *dsc)
7374 {
7375 displaced_debug_printf ("copying undefined insn %.8lx",
7376 (unsigned long) insn);
7377
7378 dsc->modinsn[0] = insn;
7379
7380 return 0;
7381 }
7382
7383 static int
7384 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7385 arm_displaced_step_copy_insn_closure *dsc)
7386 {
7387
7388 displaced_debug_printf ("copying undefined insn %.4x %.4x",
7389 (unsigned short) insn1, (unsigned short) insn2);
7390
7391 dsc->modinsn[0] = insn1;
7392 dsc->modinsn[1] = insn2;
7393 dsc->numinsns = 2;
7394
7395 return 0;
7396 }
7397
7398 /* Copy unpredictable instructions. */
7399
7400 static int
7401 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7402 arm_displaced_step_copy_insn_closure *dsc)
7403 {
7404 displaced_debug_printf ("copying unpredictable insn %.8lx",
7405 (unsigned long) insn);
7406
7407 dsc->modinsn[0] = insn;
7408
7409 return 0;
7410 }
7411
7412 /* The decode_* functions are instruction decoding helpers. They mostly follow
7413 the presentation in the ARM ARM. */
7414
7415 static int
7416 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7417 struct regcache *regs,
7418 arm_displaced_step_copy_insn_closure *dsc)
7419 {
7420 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7421 unsigned int rn = bits (insn, 16, 19);
7422
7423 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
7424 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7425 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
7426 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7427 else if ((op1 & 0x60) == 0x20)
7428 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7429 else if ((op1 & 0x71) == 0x40)
7430 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7431 dsc);
7432 else if ((op1 & 0x77) == 0x41)
7433 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7434 else if ((op1 & 0x77) == 0x45)
7435 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7436 else if ((op1 & 0x77) == 0x51)
7437 {
7438 if (rn != 0xf)
7439 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7440 else
7441 return arm_copy_unpred (gdbarch, insn, dsc);
7442 }
7443 else if ((op1 & 0x77) == 0x55)
7444 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7445 else if (op1 == 0x57)
7446 switch (op2)
7447 {
7448 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7449 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7450 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7451 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7452 default: return arm_copy_unpred (gdbarch, insn, dsc);
7453 }
7454 else if ((op1 & 0x63) == 0x43)
7455 return arm_copy_unpred (gdbarch, insn, dsc);
7456 else if ((op2 & 0x1) == 0x0)
7457 switch (op1 & ~0x80)
7458 {
7459 case 0x61:
7460 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7461 case 0x65:
7462 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7463 case 0x71: case 0x75:
7464 /* pld/pldw reg. */
7465 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7466 case 0x63: case 0x67: case 0x73: case 0x77:
7467 return arm_copy_unpred (gdbarch, insn, dsc);
7468 default:
7469 return arm_copy_undef (gdbarch, insn, dsc);
7470 }
7471 else
7472 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7473 }
7474
7475 static int
7476 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7477 struct regcache *regs,
7478 arm_displaced_step_copy_insn_closure *dsc)
7479 {
7480 if (bit (insn, 27) == 0)
7481 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7482 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7483 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7484 {
7485 case 0x0: case 0x2:
7486 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7487
7488 case 0x1: case 0x3:
7489 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7490
7491 case 0x4: case 0x5: case 0x6: case 0x7:
7492 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7493
7494 case 0x8:
7495 switch ((insn & 0xe00000) >> 21)
7496 {
7497 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7498 /* stc/stc2. */
7499 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7500
7501 case 0x2:
7502 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7503
7504 default:
7505 return arm_copy_undef (gdbarch, insn, dsc);
7506 }
7507
7508 case 0x9:
7509 {
7510 int rn_f = (bits (insn, 16, 19) == 0xf);
7511 switch ((insn & 0xe00000) >> 21)
7512 {
7513 case 0x1: case 0x3:
7514 /* ldc/ldc2 imm (undefined for rn == pc). */
7515 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7516 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7517
7518 case 0x2:
7519 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7520
7521 case 0x4: case 0x5: case 0x6: case 0x7:
7522 /* ldc/ldc2 lit (undefined for rn != pc). */
7523 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7524 : arm_copy_undef (gdbarch, insn, dsc);
7525
7526 default:
7527 return arm_copy_undef (gdbarch, insn, dsc);
7528 }
7529 }
7530
7531 case 0xa:
7532 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7533
7534 case 0xb:
7535 if (bits (insn, 16, 19) == 0xf)
7536 /* ldc/ldc2 lit. */
7537 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7538 else
7539 return arm_copy_undef (gdbarch, insn, dsc);
7540
7541 case 0xc:
7542 if (bit (insn, 4))
7543 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7544 else
7545 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7546
7547 case 0xd:
7548 if (bit (insn, 4))
7549 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7550 else
7551 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7552
7553 default:
7554 return arm_copy_undef (gdbarch, insn, dsc);
7555 }
7556 }
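/* The switch key above packs insn bits 26:24 into key bits 3:1 and insn
bit 20 into key bit 0.  For instance, the immediate form of BLX has
bits 27:25 = 101, so bits 26:24 are 01H and the key works out to one of
0x4-0x7 depending on the H bit and on bit 20 of the immediate, which is
why cases 0x4-0x7 above all dispatch to arm_copy_b_bl_blx.  */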
7557
7558 /* Decode miscellaneous instructions in dp/misc encoding space. */
7559
7560 static int
7561 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7562 struct regcache *regs,
7563 arm_displaced_step_copy_insn_closure *dsc)
7564 {
7565 unsigned int op2 = bits (insn, 4, 6);
7566 unsigned int op = bits (insn, 21, 22);
7567
7568 switch (op2)
7569 {
7570 case 0x0:
7571 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7572
7573 case 0x1:
7574 if (op == 0x1) /* bx. */
7575 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7576 else if (op == 0x3)
7577 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7578 else
7579 return arm_copy_undef (gdbarch, insn, dsc);
7580
7581 case 0x2:
7582 if (op == 0x1)
7583 /* Not really supported. */
7584 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7585 else
7586 return arm_copy_undef (gdbarch, insn, dsc);
7587
7588 case 0x3:
7589 if (op == 0x1)
7590 return arm_copy_bx_blx_reg (gdbarch, insn,
7591 regs, dsc); /* blx register. */
7592 else
7593 return arm_copy_undef (gdbarch, insn, dsc);
7594
7595 case 0x5:
7596 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7597
7598 case 0x7:
7599 if (op == 0x1)
7600 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7601 else if (op == 0x3)
7602 /* Not really supported. */
7603 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7604 [[fallthrough]];
7605
7606 default:
7607 return arm_copy_undef (gdbarch, insn, dsc);
7608 }
7609 }
7610
7611 static int
7612 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7613 struct regcache *regs,
7614 arm_displaced_step_copy_insn_closure *dsc)
7615 {
7616 if (bit (insn, 25))
7617 switch (bits (insn, 20, 24))
7618 {
7619 case 0x10:
7620 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7621
7622 case 0x14:
7623 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7624
7625 case 0x12: case 0x16:
7626 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7627
7628 default:
7629 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7630 }
7631 else
7632 {
7633 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7634
7635 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7636 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7637 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7638 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7639 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7640 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7641 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7642 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7643 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7644 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7645 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7646 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7647 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7648 /* 2nd arg means "unprivileged". */
7649 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7650 dsc);
7651 }
7652
7653 /* Should be unreachable. */
7654 return 1;
7655 }
7656
7657 static int
7658 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7659 struct regcache *regs,
7660 arm_displaced_step_copy_insn_closure *dsc)
7661 {
7662 int a = bit (insn, 25), b = bit (insn, 4);
7663 uint32_t op1 = bits (insn, 20, 24);
7664
7665 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7666 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7667 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7668 else if ((!a && (op1 & 0x17) == 0x02)
7669 || (a && (op1 & 0x17) == 0x02 && !b))
7670 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7671 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7672 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7673 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7674 else if ((!a && (op1 & 0x17) == 0x03)
7675 || (a && (op1 & 0x17) == 0x03 && !b))
7676 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7677 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7678 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7679 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7680 else if ((!a && (op1 & 0x17) == 0x06)
7681 || (a && (op1 & 0x17) == 0x06 && !b))
7682 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7683 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7684 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7685 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7686 else if ((!a && (op1 & 0x17) == 0x07)
7687 || (a && (op1 & 0x17) == 0x07 && !b))
7688 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7689
7690 /* Should be unreachable. */
7691 return 1;
7692 }
7693
7694 static int
7695 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7696 arm_displaced_step_copy_insn_closure *dsc)
7697 {
7698 switch (bits (insn, 20, 24))
7699 {
7700 case 0x00: case 0x01: case 0x02: case 0x03:
7701 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7702
7703 case 0x04: case 0x05: case 0x06: case 0x07:
7704 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7705
7706 case 0x08: case 0x09: case 0x0a: case 0x0b:
7707 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7708 return arm_copy_unmodified (gdbarch, insn,
7709 "decode/pack/unpack/saturate/reverse", dsc);
7710
7711 case 0x18:
7712 if (bits (insn, 5, 7) == 0) /* op2. */
7713 {
7714 if (bits (insn, 12, 15) == 0xf)
7715 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7716 else
7717 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7718 }
7719 else
7720 return arm_copy_undef (gdbarch, insn, dsc);
7721
7722 case 0x1a: case 0x1b:
7723 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7724 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7725 else
7726 return arm_copy_undef (gdbarch, insn, dsc);
7727
7728 case 0x1c: case 0x1d:
7729 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7730 {
7731 if (bits (insn, 0, 3) == 0xf)
7732 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7733 else
7734 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7735 }
7736 else
7737 return arm_copy_undef (gdbarch, insn, dsc);
7738
7739 case 0x1e: case 0x1f:
7740 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7741 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7742 else
7743 return arm_copy_undef (gdbarch, insn, dsc);
7744 }
7745
7746 /* Should be unreachable. */
7747 return 1;
7748 }
7749
7750 static int
7751 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7752 struct regcache *regs,
7753 arm_displaced_step_copy_insn_closure *dsc)
7754 {
7755 if (bit (insn, 25))
7756 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7757 else
7758 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7759 }
7760
7761 static int
7762 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7763 struct regcache *regs,
7764 arm_displaced_step_copy_insn_closure *dsc)
7765 {
7766 unsigned int opcode = bits (insn, 20, 24);
7767
7768 switch (opcode)
7769 {
7770 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7771 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7772
7773 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7774 case 0x12: case 0x16:
7775 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7776
7777 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7778 case 0x13: case 0x17:
7779 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7780
7781 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7782 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7783 /* Note: no writeback for these instructions. Bit 25 will always be
7784 zero though (via caller), so the following works OK. */
7785 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7786 }
7787
7788 /* Should be unreachable. */
7789 return 1;
7790 }
7791
7792 /* Decode shifted register instructions. */
7793
7794 static int
7795 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7796 uint16_t insn2, struct regcache *regs,
7797 arm_displaced_step_copy_insn_closure *dsc)
7798 {
7799 /* The PC is only allowed to be used in the MOV instruction. */
7800
7801 unsigned int op = bits (insn1, 5, 8);
7802 unsigned int rn = bits (insn1, 0, 3);
7803
7804 if (op == 0x2 && rn == 0xf) /* MOV */
7805 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7806 else
7807 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7808 "dp (shift reg)", dsc);
7809 }
7810
7811
7812 /* Decode extension register load/store. Exactly the same as
7813 arm_decode_ext_reg_ld_st. */
7814
7815 static int
7816 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7817 uint16_t insn2, struct regcache *regs,
7818 arm_displaced_step_copy_insn_closure *dsc)
7819 {
7820 unsigned int opcode = bits (insn1, 4, 8);
7821
7822 switch (opcode)
7823 {
7824 case 0x04: case 0x05:
7825 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7826 "vfp/neon vmov", dsc);
7827
7828 case 0x08: case 0x0c: /* 01x00 */
7829 case 0x0a: case 0x0e: /* 01x10 */
7830 case 0x12: case 0x16: /* 10x10 */
7831 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7832 "vfp/neon vstm/vpush", dsc);
7833
7834 case 0x09: case 0x0d: /* 01x01 */
7835 case 0x0b: case 0x0f: /* 01x11 */
7836 case 0x13: case 0x17: /* 10x11 */
7837 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7838 "vfp/neon vldm/vpop", dsc);
7839
7840 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7841 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7842 "vstr", dsc);
7843 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7844 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7845 }
7846
7847 /* Should be unreachable. */
7848 return 1;
7849 }
7850
7851 static int
7852 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7853 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7854 {
7855 unsigned int op1 = bits (insn, 20, 25);
7856 int op = bit (insn, 4);
7857 unsigned int coproc = bits (insn, 8, 11);
7858
7859 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7860 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7861 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7862 && (coproc & 0xe) != 0xa)
7863 /* stc/stc2. */
7864 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7865 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7866 && (coproc & 0xe) != 0xa)
7867 /* ldc/ldc2 imm/lit. */
7868 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7869 else if ((op1 & 0x3e) == 0x00)
7870 return arm_copy_undef (gdbarch, insn, dsc);
7871 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7872 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7873 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7874 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7875 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7876 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7877 else if ((op1 & 0x30) == 0x20 && !op)
7878 {
7879 if ((coproc & 0xe) == 0xa)
7880 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7881 else
7882 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7883 }
7884 else if ((op1 & 0x30) == 0x20 && op)
7885 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7886 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7887 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7888 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7889 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7890 else if ((op1 & 0x30) == 0x30)
7891 return arm_copy_svc (gdbarch, insn, regs, dsc);
7892 else
7893 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7894 }
7895
7896 static int
7897 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7898 uint16_t insn2, struct regcache *regs,
7899 arm_displaced_step_copy_insn_closure *dsc)
7900 {
7901 unsigned int coproc = bits (insn2, 8, 11);
7902 unsigned int bit_5_8 = bits (insn1, 5, 8);
7903 unsigned int bit_9 = bit (insn1, 9);
7904 unsigned int bit_4 = bit (insn1, 4);
7905
7906 if (bit_9 == 0)
7907 {
7908 if (bit_5_8 == 2)
7909 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7910 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7911 dsc);
7912 else if (bit_5_8 == 0) /* UNDEFINED. */
7913 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7914 else
7915 {
7916 /* coproc is 101x: SIMD/VFP, ext registers load/store. */
7917 if ((coproc & 0xe) == 0xa)
7918 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7919 dsc);
7920 else /* coproc is not 101x. */
7921 {
7922 if (bit_4 == 0) /* STC/STC2. */
7923 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7924 "stc/stc2", dsc);
7925 else /* LDC/LDC2 {literal, immediate}. */
7926 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7927 regs, dsc);
7928 }
7929 }
7930 }
7931 else
7932 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7933
7934 return 0;
7935 }
7936
7937 static void
7938 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7939 arm_displaced_step_copy_insn_closure *dsc, int rd)
7940 {
7941 /* ADR Rd, #imm
7942
7943 Rewrite as:
7944
7945 Preparation: Rd <- PC
7946 Insn: ADD Rd, #imm
7947 Cleanup: Null.
7948 */
7949
7950 /* Rd <- PC */
7951 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7952 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7953 }
7954
7955 static int
7956 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7957 arm_displaced_step_copy_insn_closure *dsc,
7958 int rd, unsigned int imm)
7959 {
7960
7961 /* Encoding T2: ADDS Rd, #imm */
7962 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7963
7964 install_pc_relative (gdbarch, regs, dsc, rd);
7965
7966 return 0;
7967 }
7968
7969 static int
7970 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7971 struct regcache *regs,
7972 arm_displaced_step_copy_insn_closure *dsc)
7973 {
7974 unsigned int rd = bits (insn, 8, 10);
7975 unsigned int imm8 = bits (insn, 0, 7);
7976
7977 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
7978 rd, imm8, insn);
7979
7980 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7981 }
7982
7983 static int
7984 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7985 uint16_t insn2, struct regcache *regs,
7986 arm_displaced_step_copy_insn_closure *dsc)
7987 {
7988 unsigned int rd = bits (insn2, 8, 11);
7989 /* The immediate field has the same encoding in ADR, ADD and SUB, so we
7990 simply extract the raw immediate fields rather than computing the
7991 immediate value. When generating the ADD or SUB instruction, the
7992 immediate can then just be ORed into place. */
7993 unsigned int imm_3_8 = insn2 & 0x70ff;
7994 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7995
7996 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
7997 rd, imm_i, imm_3_8, insn1, insn2);
7998
7999 if (bit (insn1, 7)) /* ADR encoding T2 (SUB form). */
8000 {
8001 /* Rewrite as SUB Rd, Rd, #imm (encoding T3). */
8002 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
8003 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8004 }
8005 else /* ADR encoding T3 (ADD form). */
8006 {
8007 /* Rewrite as ADD Rd, Rd, #imm (encoding T3). */
8008 dsc->modinsn[0] = (0xf100 | rd | imm_i);
8009 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8010 }
8011 dsc->numinsns = 2;
8012
8013 install_pc_relative (gdbarch, regs, dsc, rd);
8014
8015 return 0;
8016 }
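/* Worked example (operands invented): "adr.w r5, <label>" with the label
42 bytes after Align(PC,4) is insn1 = 0xf20f, insn2 = 0x052a.  Bit 7 of
insn1 is clear (the ADD form of ADR), rd = 5, imm_i = 0 and
imm_3_8 = 0x002a, so the copied instruction becomes 0xf105/0x052a,
i.e. "add.w r5, r5, #42", with r5 primed to the PC value beforehand by
install_pc_relative.  */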
8017
8018 static int
8019 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
8020 struct regcache *regs,
8021 arm_displaced_step_copy_insn_closure *dsc)
8022 {
8023 unsigned int rt = bits (insn1, 8, 10);
8024 unsigned int pc;
8025 int imm8 = (bits (insn1, 0, 7) << 2);
8026
8027 /* LDR Rd, #imm8
8028
8029 Rewrite as:
8030
8031 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8032
8033 Insn: LDR R0, [R2, R3];
8034 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8035
8036 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt, imm8);
8037
8038 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8039 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8040 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8041 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8042 /* The assembler calculates the required value of the offset from the
8043 Align(PC,4) value of this instruction to the label. */
8044 pc = pc & 0xfffffffc;
8045
8046 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8047 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8048
8049 dsc->rd = rt;
8050 dsc->u.ldst.xfersize = 4;
8051 dsc->u.ldst.rn = 0;
8052 dsc->u.ldst.immed = 0;
8053 dsc->u.ldst.writeback = 0;
8054 dsc->u.ldst.restore_r4 = 0;
8055
8056 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8057
8058 dsc->cleanup = &cleanup_load;
8059
8060 return 0;
8061 }
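/* Worked example (addresses invented): "ldr r3, [pc, #16]" at 0x8000 is
insn1 = 0x4b04, giving rt = 3 and imm8 = 16.  The PC value seen by the
instruction is 0x8004, already word aligned, so r2 is set to 0x8004 and
r3 to 16; the out-of-line "ldr r0, [r2, r3]" then loads from 0x8014 and
the cleanup copies the loaded value from r0 into r3 and restores r0 and
r2 from dsc->tmp[], as described above.  */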
8062
8063 /* Copy Thumb cbnz/cbz instruction. */
8064
8065 static int
8066 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8067 struct regcache *regs,
8068 arm_displaced_step_copy_insn_closure *dsc)
8069 {
8070 int non_zero = bit (insn1, 11);
8071 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8072 CORE_ADDR from = dsc->insn_addr;
8073 int rn = bits (insn1, 0, 2);
8074 int rn_val = displaced_read_reg (regs, dsc, rn);
8075
8076 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8077 /* CBNZ and CBZ do not affect the condition flags. If the branch is taken,
8078 set the condition to INST_AL so that cleanup_branch knows the branch was
8079 taken; otherwise leave it alone and cleanup_branch will do nothing. */
8080 if (dsc->u.branch.cond)
8081 {
8082 dsc->u.branch.cond = INST_AL;
8083 dsc->u.branch.dest = from + 4 + imm5;
8084 }
8085 else
8086 dsc->u.branch.dest = from + 2;
8087
8088 dsc->u.branch.link = 0;
8089 dsc->u.branch.exchange = 0;
8090
8091 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
8092 non_zero ? "cbnz" : "cbz",
8093 rn, rn_val, insn1, dsc->u.branch.dest);
8094
8095 dsc->modinsn[0] = THUMB_NOP;
8096
8097 dsc->cleanup = &cleanup_branch;
8098 return 0;
8099 }
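/* For example (register contents invented): for "cbz r2, <label>" 18
bytes ahead the offset expression above yields 18.  If r2 happens to be
zero the branch is taken, so cond is forced to INST_AL and cleanup_branch
writes from + 4 + 18 into the PC; if r2 is non-zero the PC simply
advances to from + 2.  Either way the instruction copied to the scratch
area is just a Thumb NOP.  */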
8100
8101 /* Copy Table Branch Byte/Halfword. */
8102 static int
8103 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8104 uint16_t insn2, struct regcache *regs,
8105 arm_displaced_step_copy_insn_closure *dsc)
8106 {
8107 ULONGEST rn_val, rm_val;
8108 int is_tbh = bit (insn2, 4);
8109 CORE_ADDR halfwords = 0;
8110 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8111
8112 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8113 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8114
8115 if (is_tbh)
8116 {
8117 gdb_byte buf[2];
8118
8119 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8120 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8121 }
8122 else
8123 {
8124 gdb_byte buf[1];
8125
8126 target_read_memory (rn_val + rm_val, buf, 1);
8127 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8128 }
8129
8130 displaced_debug_printf ("%s base 0x%x offset 0x%x table entry 0x%x",
8131 is_tbh ? "tbh" : "tbb",
8132 (unsigned int) rn_val, (unsigned int) rm_val,
8133 (unsigned int) halfwords);
8134
8135 dsc->u.branch.cond = INST_AL;
8136 dsc->u.branch.link = 0;
8137 dsc->u.branch.exchange = 0;
8138 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8139
8140 dsc->cleanup = &cleanup_branch;
8141
8142 return 0;
8143 }
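/* Illustration (values invented): for "tbb [r0, r1]" with r0 = 0x9000 and
r1 = 3, the byte at 0x9003 is read; if it contains 0x12 the branch
destination becomes insn_addr + 4 + 2 * 0x12, which cleanup_branch writes
to the PC after the step.  For "tbh [r0, r1, lsl #1]" the halfword at
r0 + 2 * r1 is used instead.  */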
8144
8145 static void
8146 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8147 arm_displaced_step_copy_insn_closure *dsc)
8148 {
8149 /* PC <- r7 */
8150 int val = displaced_read_reg (regs, dsc, 7);
8151 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8152
8153 /* r7 <- r8 */
8154 val = displaced_read_reg (regs, dsc, 8);
8155 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8156
8157 /* r8 <- tmp[0] */
8158 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8159
8160 }
8161
8162 static int
8163 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
8164 struct regcache *regs,
8165 arm_displaced_step_copy_insn_closure *dsc)
8166 {
8167 dsc->u.block.regmask = insn1 & 0x00ff;
8168
8169 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8170 to:
8171
8172 (1) register list is full, that is, r0-r7 are used.
8173 Prepare: tmp[0] <- r8
8174
8175 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8176 MOV r8, r7; Move value of r7 to r8;
8177 POP {r7}; Store PC value into r7.
8178
8179 Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
8180
8181 (2) register list is not full, supposing there are N registers in
8182 register list (except PC, 0 <= N <= 7).
8183 Prepare: for each i, 0 - N, tmp[i] <- ri.
8184
8185 POP {r0, r1, ...., rN};
8186
8187 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8188 from tmp[] properly.
8189 */
8190 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
8191 dsc->u.block.regmask, insn1);
8192
8193 if (dsc->u.block.regmask == 0xff)
8194 {
8195 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8196
8197 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8198 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8199 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8200
8201 dsc->numinsns = 3;
8202 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8203 }
8204 else
8205 {
8206 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
8207 unsigned int i;
8208 unsigned int new_regmask;
8209
8210 for (i = 0; i < num_in_list + 1; i++)
8211 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8212
8213 new_regmask = (1 << (num_in_list + 1)) - 1;
8214
8215 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
8216 "modified list %.4x",
8217 (int) dsc->u.block.regmask, new_regmask);
8218
8219 dsc->u.block.regmask |= 0x8000;
8220 dsc->u.block.writeback = 0;
8221 dsc->u.block.cond = INST_AL;
8222
8223 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8224
8225 dsc->cleanup = &cleanup_block_load_pc;
8226 }
8227
8228 return 0;
8229 }
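/* Worked example for case (2) above (register list invented):
"pop {r1, r4, pc}" is insn1 = 0xbd12, so the low regmask is 0x12 and
num_in_list = 2.  The copied instruction becomes 0xbc07, i.e.
"pop {r0, r1, r2}", the regmask is widened to 0x8012, and
cleanup_block_load_pc then moves the popped values into r1, r4 and the
PC and restores the clobbered r0-r2 from dsc->tmp[].  */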
8230
8231 static void
8232 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8233 struct regcache *regs,
8234 arm_displaced_step_copy_insn_closure *dsc)
8235 {
8236 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8237 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8238 int err = 0;
8239
8240 /* 16-bit thumb instructions. */
8241 switch (op_bit_12_15)
8242 {
8243 /* Shift (immediate), add, subtract, move and compare. */
8244 case 0: case 1: case 2: case 3:
8245 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8246 "shift/add/sub/mov/cmp",
8247 dsc);
8248 break;
8249 case 4:
8250 switch (op_bit_10_11)
8251 {
8252 case 0: /* Data-processing */
8253 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8254 "data-processing",
8255 dsc);
8256 break;
8257 case 1: /* Special data instructions and branch and exchange. */
8258 {
8259 unsigned short op = bits (insn1, 7, 9);
8260 if (op == 6 || op == 7) /* BX or BLX */
8261 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8262 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8263 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8264 else
8265 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8266 dsc);
8267 }
8268 break;
8269 default: /* LDR (literal) */
8270 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8271 }
8272 break;
8273 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8274 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8275 break;
8276 case 10:
8277 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8278 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8279 else /* Generate SP-relative address */
8280 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8281 break;
8282 case 11: /* Misc 16-bit instructions */
8283 {
8284 switch (bits (insn1, 8, 11))
8285 {
8286 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8287 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8288 break;
8289 case 12: case 13: /* POP */
8290 if (bit (insn1, 8)) /* PC is in register list. */
8291 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8292 else
8293 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8294 break;
8295 case 15: /* If-Then, and hints */
8296 if (bits (insn1, 0, 3))
8297 /* If-Then makes up to four following instructions conditional. The
8298 IT instruction itself is not conditional, so handle it as an
8299 ordinary unmodified instruction. */
8300 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8301 dsc);
8302 else
8303 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8304 break;
8305 default:
8306 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8307 }
8308 }
8309 break;
8310 case 12:
8311 if (op_bit_10_11 < 2) /* Store multiple registers */
8312 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8313 else /* Load multiple registers */
8314 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8315 break;
8316 case 13: /* Conditional branch and supervisor call */
8317 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8318 err = thumb_copy_b (gdbarch, insn1, dsc);
8319 else
8320 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8321 break;
8322 case 14: /* Unconditional branch */
8323 err = thumb_copy_b (gdbarch, insn1, dsc);
8324 break;
8325 default:
8326 err = 1;
8327 }
8328
8329 if (err)
8330 internal_error (_("thumb_process_displaced_16bit_insn: Instruction decode error"));
8331 }
8332
8333 static int
8334 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8335 uint16_t insn1, uint16_t insn2,
8336 struct regcache *regs,
8337 arm_displaced_step_copy_insn_closure *dsc)
8338 {
8339 int rt = bits (insn2, 12, 15);
8340 int rn = bits (insn1, 0, 3);
8341 int op1 = bits (insn1, 7, 8);
8342
8343 switch (bits (insn1, 5, 6))
8344 {
8345 case 0: /* Load byte and memory hints */
8346 if (rt == 0xf) /* PLD/PLI */
8347 {
8348 if (rn == 0xf)
8349 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8350 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8351 else
8352 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8353 "pli/pld", dsc);
8354 }
8355 else
8356 {
8357 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8358 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8359 1);
8360 else
8361 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8362 "ldrb{reg, immediate}/ldrbt",
8363 dsc);
8364 }
8365
8366 break;
8367 case 1: /* Load halfword and memory hints. */
8368 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8369 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8370 "pld/unalloc memhint", dsc);
8371 else
8372 {
8373 if (rn == 0xf)
8374 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8375 2);
8376 else
8377 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8378 "ldrh/ldrht", dsc);
8379 }
8380 break;
8381 case 2: /* Load word */
8382 {
8383 int insn2_bit_8_11 = bits (insn2, 8, 11);
8384
8385 if (rn == 0xf)
8386 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8387 else if (op1 == 0x1) /* Encoding T3 */
8388 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8389 0, 1);
8390 else /* op1 == 0x0 */
8391 {
8392 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8393 /* LDR (immediate) */
8394 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8395 dsc, bit (insn2, 8), 1);
8396 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8397 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8398 "ldrt", dsc);
8399 else
8400 /* LDR (register) */
8401 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8402 dsc, 0, 0);
8403 }
8404 break;
8405 }
8406 default:
8407 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8408 break;
8409 }
8410 return 0;
8411 }
8412
8413 static void
8414 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8415 uint16_t insn2, struct regcache *regs,
8416 arm_displaced_step_copy_insn_closure *dsc)
8417 {
8418 int err = 0;
8419 unsigned short op = bit (insn2, 15);
8420 unsigned int op1 = bits (insn1, 11, 12);
8421
8422 switch (op1)
8423 {
8424 case 1:
8425 {
8426 switch (bits (insn1, 9, 10))
8427 {
8428 case 0:
8429 if (bit (insn1, 6))
8430 {
8431 /* Load/store {dual, exclusive}, table branch. */
8432 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8433 && bits (insn2, 5, 7) == 0)
8434 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8435 dsc);
8436 else
8437 /* The PC is not allowed to be used in load/store {dual, exclusive}
8438 instructions. */
8439 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8440 "load/store dual/ex", dsc);
8441 }
8442 else /* load/store multiple */
8443 {
8444 switch (bits (insn1, 7, 8))
8445 {
8446 case 0: case 3: /* SRS, RFE */
8447 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8448 "srs/rfe", dsc);
8449 break;
8450 case 1: case 2: /* LDM/STM/PUSH/POP */
8451 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8452 break;
8453 }
8454 }
8455 break;
8456
8457 case 1:
8458 /* Data-processing (shift register). */
8459 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8460 dsc);
8461 break;
8462 default: /* Coprocessor instructions. */
8463 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8464 break;
8465 }
8466 break;
8467 }
8468 case 2: /* op1 = 2 */
8469 if (op) /* Branch and misc control. */
8470 {
8471 if (bit (insn2, 14) /* BLX/BL */
8472 || bit (insn2, 12) /* Unconditional branch */
8473 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8474 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8475 else
8476 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8477 "misc ctrl", dsc);
8478 }
8479 else
8480 {
8481 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8482 {
8483 int dp_op = bits (insn1, 4, 8);
8484 int rn = bits (insn1, 0, 3);
8485 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
8486 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8487 regs, dsc);
8488 else
8489 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8490 "dp/pb", dsc);
8491 }
8492 else /* Data processing (modified immediate) */
8493 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8494 "dp/mi", dsc);
8495 }
8496 break;
8497 case 3: /* op1 = 3 */
8498 switch (bits (insn1, 9, 10))
8499 {
8500 case 0:
8501 if (bit (insn1, 4))
8502 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8503 regs, dsc);
8504 else /* NEON Load/Store and Store single data item */
8505 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8506 "neon elt/struct load/store",
8507 dsc);
8508 break;
8509 case 1: /* op1 = 3, bits (9, 10) == 1 */
8510 switch (bits (insn1, 7, 8))
8511 {
8512 case 0: case 1: /* Data processing (register) */
8513 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8514 "dp(reg)", dsc);
8515 break;
8516 case 2: /* Multiply and absolute difference */
8517 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8518 "mul/mua/diff", dsc);
8519 break;
8520 case 3: /* Long multiply and divide */
8521 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8522 "lmul/lmua", dsc);
8523 break;
8524 }
8525 break;
8526 default: /* Coprocessor instructions */
8527 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8528 break;
8529 }
8530 break;
8531 default:
8532 err = 1;
8533 }
8534
8535 if (err)
8536 internal_error (_("thumb_process_displaced_32bit_insn: Instruction decode error"));
8537
8538 }
8539
8540 static void
8541 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8542 struct regcache *regs,
8543 arm_displaced_step_copy_insn_closure *dsc)
8544 {
8545 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8546 uint16_t insn1
8547 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8548
8549 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
8550 insn1, (unsigned long) from);
8551
8552 dsc->is_thumb = 1;
8553 dsc->insn_size = thumb_insn_size (insn1);
8554 if (thumb_insn_size (insn1) == 4)
8555 {
8556 uint16_t insn2
8557 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8558 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8559 }
8560 else
8561 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8562 }
8563
8564 void
8565 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8566 CORE_ADDR to, struct regcache *regs,
8567 arm_displaced_step_copy_insn_closure *dsc)
8568 {
8569 int err = 0;
8570 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8571 uint32_t insn;
8572
8573 /* Most displaced instructions use a 1-instruction scratch space, so set this
8574 here and override below if/when necessary. */
8575 dsc->numinsns = 1;
8576 dsc->insn_addr = from;
8577 dsc->scratch_base = to;
8578 dsc->cleanup = NULL;
8579 dsc->wrote_to_pc = 0;
8580
8581 if (!displaced_in_arm_mode (regs))
8582 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
8583
8584 dsc->is_thumb = 0;
8585 dsc->insn_size = 4;
8586 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8587 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
8588 (unsigned long) insn, (unsigned long) from);
8589
8590 if ((insn & 0xf0000000) == 0xf0000000)
8591 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8592 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8593 {
8594 case 0x0: case 0x1: case 0x2: case 0x3:
8595 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8596 break;
8597
8598 case 0x4: case 0x5: case 0x6:
8599 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8600 break;
8601
8602 case 0x7:
8603 err = arm_decode_media (gdbarch, insn, dsc);
8604 break;
8605
8606 case 0x8: case 0x9: case 0xa: case 0xb:
8607 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8608 break;
8609
8610 case 0xc: case 0xd: case 0xe: case 0xf:
8611 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
8612 break;
8613 }
8614
8615 if (err)
8616 internal_error (_("arm_process_displaced_insn: Instruction decode error"));
8617 }
8618
8619 /* Actually set up the scratch space for a displaced instruction. */
8620
8621 void
8622 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8623 CORE_ADDR to,
8624 arm_displaced_step_copy_insn_closure *dsc)
8625 {
8626 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8627 unsigned int i, len, offset;
8628 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8629 int size = dsc->is_thumb ? 2 : 4;
8630 const gdb_byte *bkp_insn;
8631
8632 offset = 0;
8633 /* Poke modified instruction(s). */
8634 for (i = 0; i < dsc->numinsns; i++)
8635 {
8636 if (size == 4)
8637 displaced_debug_printf ("writing insn %.8lx at %.8lx",
8638 dsc->modinsn[i], (unsigned long) to + offset);
8639 else if (size == 2)
8640 displaced_debug_printf ("writing insn %.4x at %.8lx",
8641 (unsigned short) dsc->modinsn[i],
8642 (unsigned long) to + offset);
8643
8644 write_memory_unsigned_integer (to + offset, size,
8645 byte_order_for_code,
8646 dsc->modinsn[i]);
8647 offset += size;
8648 }
8649
8650 /* Choose the correct breakpoint instruction. */
8651 if (dsc->is_thumb)
8652 {
8653 bkp_insn = tdep->thumb_breakpoint;
8654 len = tdep->thumb_breakpoint_size;
8655 }
8656 else
8657 {
8658 bkp_insn = tdep->arm_breakpoint;
8659 len = tdep->arm_breakpoint_size;
8660 }
8661
8662 /* Put breakpoint afterwards. */
8663 write_memory (to + offset, bkp_insn, len);
8664
8665 displaced_debug_printf ("copy %s->%s", paddress (gdbarch, from),
8666 paddress (gdbarch, to));
8667 }
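/* The scratch area therefore ends up as the modified instruction(s)
followed immediately by a breakpoint: e.g. for a single ARM instruction,
four bytes of modified instruction at TO and the four-byte ARM breakpoint
at TO + 4; for a 32-bit Thumb instruction the two halfwords go at TO and
TO + 2 with the Thumb breakpoint at TO + 4.  */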
8668
8669 /* Entry point for cleaning things up after a displaced instruction has been
8670 single-stepped. */
8671
8672 void
8673 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8674 struct displaced_step_copy_insn_closure *dsc_,
8675 CORE_ADDR from, CORE_ADDR to,
8676 struct regcache *regs, bool completed_p)
8677 {
8678 /* The following block exists as a temporary measure while displaced
8679 stepping is fixed architecture at a time within GDB.
8680
8681 In an earlier implementation of displaced stepping, if GDB thought the
8682 displaced instruction had not been executed then this fix up function
8683 was never called. As a consequence, things that should be fixed by
8684 this function were left in an unfixed state.
8685
8686 However, it's not as simple as always calling this function; this
8687 function needs to be updated to decide what should be fixed up based
8688 on whether the displaced step executed or not, which requires each
8689 architecture to be considered individually.
8690
8691 Until this architecture is updated, this block replicates the old
8692 behaviour; we just restore the program counter register, and leave
8693 everything else unfixed. */
8694 if (!completed_p)
8695 {
8696 CORE_ADDR pc = regcache_read_pc (regs);
8697 pc = from + (pc - to);
8698 regcache_write_pc (regs, pc);
8699 return;
8700 }
8701
8702 arm_displaced_step_copy_insn_closure *dsc
8703 = (arm_displaced_step_copy_insn_closure *) dsc_;
8704
8705 if (dsc->cleanup)
8706 dsc->cleanup (gdbarch, regs, dsc);
8707
8708 if (!dsc->wrote_to_pc)
8709 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8710 dsc->insn_addr + dsc->insn_size);
8711
8712 }
8713
8714 #include "bfd-in2.h"
8715 #include "libcoff.h"
8716
8717 static int
8718 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8719 {
8720 gdb_disassemble_info *di
8721 = static_cast<gdb_disassemble_info *> (info->application_data);
8722 struct gdbarch *gdbarch = di->arch ();
8723
8724 if (arm_pc_is_thumb (gdbarch, memaddr))
8725 {
8726 static asymbol *asym;
8727 static combined_entry_type ce;
8728 static struct coff_symbol_struct csym;
8729 static struct bfd fake_bfd;
8730 static bfd_target fake_target;
8731
8732 if (csym.native == NULL)
8733 {
8734 /* Create a fake symbol vector containing a Thumb symbol.
8735 This is solely so that the code in print_insn_little_arm()
8736 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8737 the presence of a Thumb symbol and switch to decoding
8738 Thumb instructions. */
8739
8740 fake_target.flavour = bfd_target_coff_flavour;
8741 fake_bfd.xvec = &fake_target;
8742 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8743 csym.native = &ce;
8744 csym.symbol.the_bfd = &fake_bfd;
8745 csym.symbol.name = "fake";
8746 asym = (asymbol *) & csym;
8747 }
8748
8749 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8750 info->symbols = &asym;
8751 }
8752 else
8753 info->symbols = NULL;
8754
8755 /* GDB is able to get bfd_mach from the exe_bfd, so info->mach is
8756 accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise
8757 opcodes/arm-dis.c:print_insn resets info->mach, which would trigger
8758 the assertion on the mismatch between info->mach and
8759 bfd_get_mach (current_program_space->exec_bfd ()) in
8760 default_print_insn. */
8761 if (current_program_space->exec_bfd () != NULL
8762 && (current_program_space->exec_bfd ()->arch_info
8763 == gdbarch_bfd_arch_info (gdbarch)))
8764 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
8765
8766 return default_print_insn (memaddr, info);
8767 }
8768
8769 /* The following define instruction sequences that will cause ARM
8770 CPUs to take an undefined instruction trap. These are used to
8771 signal a breakpoint to GDB.
8772
8773 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
8774 modes. A different instruction is required for each mode. The ARM
8775 CPUs can also be big or little endian. Thus four different
8776 instructions are needed to support all cases.
8777
8778 Note: ARMv4 defines several new instructions that will take the
8779 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8780 not in fact add the new instructions. The new undefined
8781 instructions in ARMv4 are all instructions that had no defined
8782 behavior in earlier chips. There is no guarantee that they will
8783 raise an exception, but may be treated as NOP's. In practice, it
8784 may only safe to rely on instructions matching:
8785
8786 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8787 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8788 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8789
8790 Even this may only be true if the condition predicate is true. The
8791 following use a condition predicate of ALWAYS so it is always TRUE.
8792
8793 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8794 and NetBSD all use a software interrupt rather than an undefined
8795 instruction to force a trap. This can be handled by the
8796 ABI-specific code during establishment of the gdbarch vector. */
8797
8798 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8799 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8800 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8801 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8802
8803 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8804 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8805 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8806 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8807
8808 /* Implement the breakpoint_kind_from_pc gdbarch method. */
8809
8810 static int
8811 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
8812 {
8813 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8814 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8815
8816 if (arm_pc_is_thumb (gdbarch, *pcptr))
8817 {
8818 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8819
8820 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8821 check whether we are replacing a 32-bit instruction. */
8822 if (tdep->thumb2_breakpoint != NULL)
8823 {
8824 gdb_byte buf[2];
8825
8826 if (target_read_memory (*pcptr, buf, 2) == 0)
8827 {
8828 unsigned short inst1;
8829
8830 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8831 if (thumb_insn_size (inst1) == 4)
8832 return ARM_BP_KIND_THUMB2;
8833 }
8834 }
8835
8836 return ARM_BP_KIND_THUMB;
8837 }
8838 else
8839 return ARM_BP_KIND_ARM;
8840
8841 }
8842
8843 /* Implement the sw_breakpoint_from_kind gdbarch method. */
8844
8845 static const gdb_byte *
8846 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
8847 {
8848 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8849
8850 switch (kind)
8851 {
8852 case ARM_BP_KIND_ARM:
8853 *size = tdep->arm_breakpoint_size;
8854 return tdep->arm_breakpoint;
8855 case ARM_BP_KIND_THUMB:
8856 *size = tdep->thumb_breakpoint_size;
8857 return tdep->thumb_breakpoint;
8858 case ARM_BP_KIND_THUMB2:
8859 *size = tdep->thumb2_breakpoint_size;
8860 return tdep->thumb2_breakpoint;
8861 default:
8862 gdb_assert_not_reached ("unexpected arm breakpoint kind");
8863 }
8864 }
8865
8866 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
8867
8868 static int
8869 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
8870 struct regcache *regcache,
8871 CORE_ADDR *pcptr)
8872 {
8873 gdb_byte buf[4];
8874
8875 /* Check that the memory pointed to by the PC is readable. */
8876 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
8877 {
8878 struct arm_get_next_pcs next_pcs_ctx;
8879
8880 arm_get_next_pcs_ctor (&next_pcs_ctx,
8881 &arm_get_next_pcs_ops,
8882 gdbarch_byte_order (gdbarch),
8883 gdbarch_byte_order_for_code (gdbarch),
8884 0,
8885 regcache);
8886
8887 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
8888
8889 /* If *PCPTR is one of the possible next instruction addresses, use
8890 the software single-step computation and determine the Thumb mode
8891 from the destination address. */
8892 for (CORE_ADDR pc : next_pcs)
8893 {
8894 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
8895 {
8896 if (IS_THUMB_ADDR (pc))
8897 {
8898 *pcptr = MAKE_THUMB_ADDR (*pcptr);
8899 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8900 }
8901 else
8902 return ARM_BP_KIND_ARM;
8903 }
8904 }
8905 }
8906
8907 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8908 }
8909
8910 /* Extract from an array REGBUF containing the (raw) register state a
8911 function return value of type TYPE, and copy that, in virtual
8912 format, into VALBUF. */
8913
8914 static void
8915 arm_extract_return_value (struct type *type, struct regcache *regs,
8916 gdb_byte *valbuf)
8917 {
8918 struct gdbarch *gdbarch = regs->arch ();
8919 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8920 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8921
8922 while (type->code () == TYPE_CODE_RANGE)
8923 type = check_typedef (type->target_type ());
8924
8925 if (TYPE_CODE_FLT == type->code ())
8926 {
8927 switch (tdep->fp_model)
8928 {
8929 case ARM_FLOAT_FPA:
8930 {
8931 /* The value is in register F0 in internal format. We need to
8932 extract the raw value and then convert it to the desired
8933 internal type. */
8934 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
8935
8936 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
8937 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
8938 valbuf, type);
8939 }
8940 break;
8941
8942 case ARM_FLOAT_SOFT_FPA:
8943 case ARM_FLOAT_SOFT_VFP:
8944 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8945 not using the VFP ABI code. */
8946 case ARM_FLOAT_VFP:
8947 regs->cooked_read (ARM_A1_REGNUM, valbuf);
8948 if (type->length () > 4)
8949 regs->cooked_read (ARM_A1_REGNUM + 1,
8950 valbuf + ARM_INT_REGISTER_SIZE);
8951 break;
8952
8953 default:
8954 internal_error (_("arm_extract_return_value: "
8955 "Floating point model not supported"));
8956 break;
8957 }
8958 }
8959 else if (type->code () == TYPE_CODE_INT
8960 || type->code () == TYPE_CODE_CHAR
8961 || type->code () == TYPE_CODE_BOOL
8962 || type->code () == TYPE_CODE_PTR
8963 || TYPE_IS_REFERENCE (type)
8964 || type->code () == TYPE_CODE_ENUM
8965 || is_fixed_point_type (type))
8966 {
8967 /* If the type is a plain integer, then the access is
8968 straightforward. Otherwise we have to play around a bit
8969 more. */
8970 int len = type->length ();
8971 int regno = ARM_A1_REGNUM;
8972 ULONGEST tmp;
8973
8974 while (len > 0)
8975 {
8976 /* By using store_unsigned_integer we avoid having to do
8977 anything special for small big-endian values. */
8978 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8979 store_unsigned_integer (valbuf,
8980 (len > ARM_INT_REGISTER_SIZE
8981 ? ARM_INT_REGISTER_SIZE : len),
8982 byte_order, tmp);
8983 len -= ARM_INT_REGISTER_SIZE;
8984 valbuf += ARM_INT_REGISTER_SIZE;
8985 }
8986 }
8987 else
8988 {
8989 /* For a structure or union the behavior is as if the value had
8990 been stored to word-aligned memory and then loaded into
8991 registers with 32-bit load instruction(s). */
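/* For example (illustrative only): a 3-byte struct that is returned in
   registers is read from r0 (ARM_A1_REGNUM), and only its first three
   bytes are copied into the output buffer. */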
8992 int len = type->length ();
8993 int regno = ARM_A1_REGNUM;
8994 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8995
8996 while (len > 0)
8997 {
8998 regs->cooked_read (regno++, tmpbuf);
8999 memcpy (valbuf, tmpbuf,
9000 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
9001 len -= ARM_INT_REGISTER_SIZE;
9002 valbuf += ARM_INT_REGISTER_SIZE;
9003 }
9004 }
9005 }
9006
9007
9008 /* Will a function return an aggregate type in memory or in a
9009 register? Return 0 if an aggregate type can be returned in a
9010 register, 1 if it must be returned in memory. */
9011
9012 static int
9013 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
9014 {
9015 enum type_code code;
9016
9017 type = check_typedef (type);
9018
9019 /* Simple, non-aggregate types (i.e. not including vectors and
9020 complex types) are always returned in a register (or registers). */
9021 code = type->code ();
9022 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
9023 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
9024 return 0;
9025
9026 if (TYPE_HAS_DYNAMIC_LENGTH (type))
9027 return 1;
9028
9029 if (TYPE_CODE_ARRAY == code && type->is_vector ())
9030 {
9031 /* Vector values should be returned using ARM registers if they
9032 are not over 16 bytes. */
9033 return (type->length () > 16);
9034 }
9035
9036 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9037 if (tdep->arm_abi != ARM_ABI_APCS)
9038 {
9039 /* The AAPCS says all aggregates not larger than a word are returned
9040 in a register. */
9041 if (type->length () <= ARM_INT_REGISTER_SIZE
9042 && language_pass_by_reference (type).trivially_copyable)
9043 return 0;
9044
9045 return 1;
9046 }
9047 else
9048 {
9049 int nRc;
9050
9051 /* All aggregate types that won't fit in a register must be returned
9052 in memory. */
9053 if (type->length () > ARM_INT_REGISTER_SIZE
9054 || !language_pass_by_reference (type).trivially_copyable)
9055 return 1;
9056
9057 /* In the ARM ABI, "integer" like aggregate types are returned in
9058 registers. For an aggregate type to be integer like, its size
9059 must be less than or equal to ARM_INT_REGISTER_SIZE and the
9060 offset of each addressable subfield must be zero. Note that bit
9061 fields are not addressable, and all addressable subfields of
9062 unions always start at offset zero.
9063
9064 This function is based on the behavior of GCC 2.95.1.
9065 See: gcc/arm.c: arm_return_in_memory() for details.
9066
9067 Note: All versions of GCC before GCC 2.95.2 do not set up the
9068 parameters correctly for a function returning the following
9069 structure: struct { float f;}; This should be returned in memory,
9070 not a register. Richard Earnshaw sent me a patch, but I do not
9071 know of any way to detect if a function like the above has been
9072 compiled with the correct calling convention. */
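/* For instance (examples chosen here for illustration): under this APCS
   path, "struct { char c; }" is integer-like and is returned in r0, while
   "struct { short a; short b; }" is not, because field "b" sits at a
   non-zero offset, so it is returned in memory. */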
9073
9074 /* Assume all other aggregate types can be returned in a register.
9075 Run a check for structures, unions and arrays. */
9076 nRc = 0;
9077
9078 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9079 {
9080 int i;
9081 /* Need to check if this struct/union is "integer" like. For
9082 this to be true, its size must be less than or equal to
9083 ARM_INT_REGISTER_SIZE and the offset of each addressable
9084 subfield must be zero. Note that bit fields are not
9085 addressable, and unions always start at offset zero. If any
9086 of the subfields is a floating point type, the struct/union
9087 cannot be an integer type. */
9088
9089 /* For each field in the object, check:
9090 1) Is it FP? --> yes, nRc = 1;
9091 2) Is it addressable (bitpos != 0) and
9092 not packed (bitsize == 0)?
9093 --> yes, nRc = 1
9094 */
9095
9096 for (i = 0; i < type->num_fields (); i++)
9097 {
9098 enum type_code field_type_code;
9099
9100 field_type_code
9101 = check_typedef (type->field (i).type ())->code ();
9102
9103 /* Is it a floating point type field? */
9104 if (field_type_code == TYPE_CODE_FLT)
9105 {
9106 nRc = 1;
9107 break;
9108 }
9109
9110 /* If bitpos != 0, then we have to care about it. */
9111 if (type->field (i).loc_bitpos () != 0)
9112 {
9113 /* Bitfields are not addressable. If the field bitsize is
9114 zero, then the field is not packed. Hence it cannot be
9115 a bitfield or any other packed type. */
9116 if (type->field (i).bitsize () == 0)
9117 {
9118 nRc = 1;
9119 break;
9120 }
9121 }
9122 }
9123 }
9124
9125 return nRc;
9126 }
9127 }
9128
9129 /* Write into appropriate registers a function return value of type
9130 TYPE, given in virtual format. */
9131
9132 static void
9133 arm_store_return_value (struct type *type, struct regcache *regs,
9134 const gdb_byte *valbuf)
9135 {
9136 struct gdbarch *gdbarch = regs->arch ();
9137 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9138
9139 while (type->code () == TYPE_CODE_RANGE)
9140 type = check_typedef (type->target_type ());
9141
9142 if (type->code () == TYPE_CODE_FLT)
9143 {
9144 gdb_byte buf[ARM_FP_REGISTER_SIZE];
9145 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9146
9147 switch (tdep->fp_model)
9148 {
9149 case ARM_FLOAT_FPA:
9150
9151 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
9152 regs->cooked_write (ARM_F0_REGNUM, buf);
9153 break;
9154
9155 case ARM_FLOAT_SOFT_FPA:
9156 case ARM_FLOAT_SOFT_VFP:
9157 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9158 not using the VFP ABI code. */
9159 case ARM_FLOAT_VFP:
9160 regs->cooked_write (ARM_A1_REGNUM, valbuf);
9161 if (type->length () > 4)
9162 regs->cooked_write (ARM_A1_REGNUM + 1,
9163 valbuf + ARM_INT_REGISTER_SIZE);
9164 break;
9165
9166 default:
9167 internal_error (_("arm_store_return_value: Floating "
9168 "point model not supported"));
9169 break;
9170 }
9171 }
9172 else if (type->code () == TYPE_CODE_INT
9173 || type->code () == TYPE_CODE_CHAR
9174 || type->code () == TYPE_CODE_BOOL
9175 || type->code () == TYPE_CODE_PTR
9176 || TYPE_IS_REFERENCE (type)
9177 || type->code () == TYPE_CODE_ENUM
9178 || is_fixed_point_type (type))
9179 {
9180 if (type->length () <= 4)
9181 {
9182 /* Values of one word or less are zero/sign-extended and
9183 returned in r0. */
9184 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
9185
9186 if (is_fixed_point_type (type))
9187 {
9188 gdb_mpz unscaled;
9189 unscaled.read (gdb::make_array_view (valbuf, type->length ()),
9190 byte_order, type->is_unsigned ());
9191 unscaled.write (gdb::make_array_view (tmpbuf, sizeof (tmpbuf)),
9192 byte_order, type->is_unsigned ());
9193 }
9194 else
9195 {
9196 LONGEST val = unpack_long (type, valbuf);
9197 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
9198 }
9199 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
9200 }
9201 else
9202 {
9203 /* Integral values greater than one word are stored in consecutive
9204 registers starting with r0. This will always be a multiple of
9205 the register size. */
9206 int len = type->length ();
9207 int regno = ARM_A1_REGNUM;
9208
9209 while (len > 0)
9210 {
9211 regs->cooked_write (regno++, valbuf);
9212 len -= ARM_INT_REGISTER_SIZE;
9213 valbuf += ARM_INT_REGISTER_SIZE;
9214 }
9215 }
9216 }
9217 else
9218 {
9219 /* For a structure or union the behavior is as if the value had
9220 been stored to word-aligned memory and then loaded into
9221 registers with 32-bit load instruction(s). */
9222 int len = type->length ();
9223 int regno = ARM_A1_REGNUM;
9224 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
9225
9226 while (len > 0)
9227 {
9228 memcpy (tmpbuf, valbuf,
9229 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
9230 regs->cooked_write (regno++, tmpbuf);
9231 len -= ARM_INT_REGISTER_SIZE;
9232 valbuf += ARM_INT_REGISTER_SIZE;
9233 }
9234 }
9235 }
9236
9237
9238 /* Handle function return values. */
9239
9240 static enum return_value_convention
9241 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9242 struct type *valtype, struct regcache *regcache,
9243 struct value **read_value, const gdb_byte *writebuf)
9244 {
9245 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9246 struct type *func_type = function ? function->type () : NULL;
9247 enum arm_vfp_cprc_base_type vfp_base_type;
9248 int vfp_base_count;
9249
9250 if (arm_vfp_abi_for_function (gdbarch, func_type)
9251 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9252 {
9253 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9254 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9255 int i;
9256
9257 gdb_byte *readbuf = nullptr;
9258 if (read_value != nullptr)
9259 {
9260 *read_value = value::allocate (valtype);
9261 readbuf = (*read_value)->contents_raw ().data ();
9262 }
9263
9264 for (i = 0; i < vfp_base_count; i++)
9265 {
9266 if (reg_char == 'q')
9267 {
9268 if (writebuf)
9269 arm_neon_quad_write (gdbarch, regcache, i,
9270 writebuf + i * unit_length);
9271
9272 if (readbuf)
9273 arm_neon_quad_read (gdbarch, regcache, i,
9274 readbuf + i * unit_length);
9275 }
9276 else
9277 {
9278 char name_buf[4];
9279 int regnum;
9280
9281 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9282 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9283 strlen (name_buf));
9284 if (writebuf)
9285 regcache->cooked_write (regnum, writebuf + i * unit_length);
9286 if (readbuf)
9287 regcache->cooked_read (regnum, readbuf + i * unit_length);
9288 }
9289 }
9290 return RETURN_VALUE_REGISTER_CONVENTION;
9291 }
9292
9293 if (valtype->code () == TYPE_CODE_STRUCT
9294 || valtype->code () == TYPE_CODE_UNION
9295 || valtype->code () == TYPE_CODE_ARRAY)
9296 {
9297 /* From the AAPCS document:
9298
9299 Result return:
9300
9301 A Composite Type larger than 4 bytes, or whose size cannot be
9302 determined statically by both caller and callee, is stored in memory
9303 at an address passed as an extra argument when the function was
9304 called (Parameter Passing, rule A.4). The memory to be used for the
9305 result may be modified at any point during the function call.
9306
9307 Parameter Passing:
9308
9309 A.4: If the subroutine is a function that returns a result in memory,
9310 then the address for the result is placed in r0 and the NCRN is set
9311 to r1. */
9312 if (tdep->struct_return == pcc_struct_return
9313 || arm_return_in_memory (gdbarch, valtype))
9314 {
9315 if (read_value != nullptr)
9316 {
9317 CORE_ADDR addr;
9318
9319 regcache->cooked_read (ARM_A1_REGNUM, &addr);
9320 *read_value = value_at_non_lval (valtype, addr);
9321 }
9322 return RETURN_VALUE_ABI_RETURNS_ADDRESS;
9323 }
9324 }
9325 else if (valtype->code () == TYPE_CODE_COMPLEX)
9326 {
9327 if (arm_return_in_memory (gdbarch, valtype))
9328 return RETURN_VALUE_STRUCT_CONVENTION;
9329 }
9330
9331 if (writebuf)
9332 arm_store_return_value (valtype, regcache, writebuf);
9333
9334 if (read_value != nullptr)
9335 {
9336 *read_value = value::allocate (valtype);
9337 gdb_byte *readbuf = (*read_value)->contents_raw ().data ();
9338 arm_extract_return_value (valtype, regcache, readbuf);
9339 }
9340
9341 return RETURN_VALUE_REGISTER_CONVENTION;
9342 }
9343
9344
9345 static int
9346 arm_get_longjmp_target (const frame_info_ptr &frame, CORE_ADDR *pc)
9347 {
9348 struct gdbarch *gdbarch = get_frame_arch (frame);
9349 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9350 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9351 CORE_ADDR jb_addr;
9352 gdb_byte buf[ARM_INT_REGISTER_SIZE];
9353
9354 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9355
9356 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9357 ARM_INT_REGISTER_SIZE))
9358 return 0;
9359
9360 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
9361 return 1;
9362 }
9363 /* A call to cmse secure entry function "foo" at "a" is modified by
9364 GNU ld as "b".
9365 a) bl xxxx <foo>
9366
9367 <foo>
9368 xxxx:
9369
9370 b) bl yyyy <__acle_se_foo>
9371
9372 section .gnu.sgstubs:
9373 <foo>
9374 yyyy: sg // secure gateway
9375 b.w xxxx <__acle_se_foo> // original_branch_dest
9376
9377 <__acle_se_foo>
9378 xxxx:
9379
9380 When control is at "b", the pc contains "yyyy" (the sg address), which is a
9381 trampoline and does not exist in source code. This function returns the
9382 target pc "xxxx". For more details please refer to section 5.4
9383 (Entry functions) and section 3.4.4 (C level development flow of secure code)
9384 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
9385 document on www.developer.arm.com. */
9386
9387 static CORE_ADDR
9388 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
9389 {
9390 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
9391 char *target_name = (char *) alloca (target_len);
9392 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
9393
9394 bound_minimal_symbol minsym
9395 = lookup_minimal_symbol (current_program_space, target_name, objfile);
9396 if (minsym.minsym != nullptr)
9397 return minsym.value_address ();
9398
9399 return 0;
9400 }
9401
9402 /* Return true when SEC points to ".gnu.sgstubs" section. */
9403
9404 static bool
9405 arm_is_sgstubs_section (struct obj_section *sec)
9406 {
9407 return (sec != nullptr
9408 && sec->the_bfd_section != nullptr
9409 && sec->the_bfd_section->name != nullptr
9410 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
9411 }
9412
9413 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9414 return the target PC. Otherwise return 0. */
9415
9416 CORE_ADDR
9417 arm_skip_stub (const frame_info_ptr &frame, CORE_ADDR pc)
9418 {
9419 const char *name;
9420 int namelen;
9421 CORE_ADDR start_addr;
9422
9423 /* Find the starting address and name of the function containing the PC. */
9424 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9425 {
9426 /* Trampoline 'bx reg' doesn't belong to any function. Do the
9427 check here. */
9428 start_addr = arm_skip_bx_reg (frame, pc);
9429 if (start_addr != 0)
9430 return start_addr;
9431
9432 return 0;
9433 }
9434
9435 /* If PC is in a Thumb call or return stub, return the address of the
9436 target PC, which is in a register. The thunk functions are called
9437 _call_via_xx, where xx is the register name. The possible names
9438 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9439 functions, named __ARM_call_via_r[0-7]. */
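/* For example (hypothetical stub name): a stub called "_call_via_ip" makes
   the loop below match the "ip" suffix and return the value of the ip
   register as the target PC. */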
9440 if (startswith (name, "_call_via_")
9441 || startswith (name, "__ARM_call_via_"))
9442 {
9443 /* Use the name suffix to determine which register contains the
9444 target PC. */
9445 static const char *table[15] =
9446 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9447 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9448 };
9449 int regno;
9450 int offset = strlen (name) - 2;
9451
9452 for (regno = 0; regno <= 14; regno++)
9453 if (strcmp (&name[offset], table[regno]) == 0)
9454 return get_frame_register_unsigned (frame, regno);
9455 }
9456
9457 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9458 non-interworking calls to foo. We could decode the stubs
9459 to find the target but it's easier to use the symbol table. */
9460 namelen = strlen (name);
9461 if (name[0] == '_' && name[1] == '_'
9462 && ((namelen > 2 + strlen ("_from_thumb")
9463 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
9464 || (namelen > 2 + strlen ("_from_arm")
9465 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
9466 {
9467 char *target_name;
9468 int target_len = namelen - 2;
9469 struct objfile *objfile;
9470 struct obj_section *sec;
9471
9472 if (name[namelen - 1] == 'b')
9473 target_len -= strlen ("_from_thumb");
9474 else
9475 target_len -= strlen ("_from_arm");
9476
9477 target_name = (char *) alloca (target_len + 1);
9478 memcpy (target_name, name + 2, target_len);
9479 target_name[target_len] = '\0';
9480
9481 sec = find_pc_section (pc);
9482 objfile = (sec == NULL) ? NULL : sec->objfile;
9483 bound_minimal_symbol minsym
9484 = lookup_minimal_symbol (current_program_space, target_name, objfile);
9485 if (minsym.minsym != NULL)
9486 return minsym.value_address ();
9487 else
9488 return 0;
9489 }
9490
9491 struct obj_section *section = find_pc_section (pc);
9492
9493 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
9494 if (arm_is_sgstubs_section (section))
9495 return arm_skip_cmse_entry (pc, name, section->objfile);
9496
9497 return 0; /* not a stub */
9498 }
9499
9500 static void
9501 arm_update_current_architecture (void)
9502 {
9503 /* If the current architecture is not ARM, we have nothing to do. */
9504 gdbarch *arch = current_inferior ()->arch ();
9505 if (gdbarch_bfd_arch_info (arch)->arch != bfd_arch_arm)
9506 return;
9507
9508 /* Update the architecture. */
9509 gdbarch_info info;
9510 if (!gdbarch_update_p (current_inferior (), info))
9511 internal_error (_("could not update architecture"));
9512 }
9513
9514 static void
9515 set_fp_model_sfunc (const char *args, int from_tty,
9516 struct cmd_list_element *c)
9517 {
9518 int fp_model;
9519
9520 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9521 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9522 {
9523 arm_fp_model = (enum arm_float_model) fp_model;
9524 break;
9525 }
9526
9527 if (fp_model == ARM_FLOAT_LAST)
9528 internal_error (_("Invalid fp model accepted: %s."),
9529 current_fp_model);
9530
9531 arm_update_current_architecture ();
9532 }
9533
9534 static void
9535 show_fp_model (struct ui_file *file, int from_tty,
9536 struct cmd_list_element *c, const char *value)
9537 {
9538 gdbarch *arch = current_inferior ()->arch ();
9539 if (arm_fp_model == ARM_FLOAT_AUTO
9540 && gdbarch_bfd_arch_info (arch)->arch == bfd_arch_arm)
9541 {
9542 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (arch);
9543
9544 gdb_printf (file, _("\
9545 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9546 fp_model_strings[tdep->fp_model]);
9547 }
9548 else
9549 gdb_printf (file, _("\
9550 The current ARM floating point model is \"%s\".\n"),
9551 fp_model_strings[arm_fp_model]);
9552 }
9553
9554 static void
9555 arm_set_abi (const char *args, int from_tty,
9556 struct cmd_list_element *c)
9557 {
9558 int arm_abi;
9559
9560 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9561 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9562 {
9563 arm_abi_global = (enum arm_abi_kind) arm_abi;
9564 break;
9565 }
9566
9567 if (arm_abi == ARM_ABI_LAST)
9568 internal_error (_("Invalid ABI accepted: %s."),
9569 arm_abi_string);
9570
9571 arm_update_current_architecture ();
9572 }
9573
9574 static void
9575 arm_show_abi (struct ui_file *file, int from_tty,
9576 struct cmd_list_element *c, const char *value)
9577 {
9578 gdbarch *arch = current_inferior ()->arch ();
9579 if (arm_abi_global == ARM_ABI_AUTO
9580 && gdbarch_bfd_arch_info (arch)->arch == bfd_arch_arm)
9581 {
9582 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (arch);
9583
9584 gdb_printf (file, _("\
9585 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9586 arm_abi_strings[tdep->arm_abi]);
9587 }
9588 else
9589 gdb_printf (file, _("The current ARM ABI is \"%s\".\n"),
9590 arm_abi_string);
9591 }
9592
9593 static void
9594 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9595 struct cmd_list_element *c, const char *value)
9596 {
9597 gdb_printf (file,
9598 _("The current execution mode assumed "
9599 "(when symbols are unavailable) is \"%s\".\n"),
9600 arm_fallback_mode_string);
9601 }
9602
9603 static void
9604 arm_show_force_mode (struct ui_file *file, int from_tty,
9605 struct cmd_list_element *c, const char *value)
9606 {
9607 gdb_printf (file,
9608 _("The current execution mode assumed "
9609 "(even when symbols are available) is \"%s\".\n"),
9610 arm_force_mode_string);
9611 }
9612
9613 static void
9614 arm_show_unwind_secure_frames (struct ui_file *file, int from_tty,
9615 struct cmd_list_element *c, const char *value)
9616 {
9617 gdb_printf (file,
9618 _("Usage of non-secure to secure exception stack unwinding is %s.\n"),
9619 arm_unwind_secure_frames ? "on" : "off");
9620 }
9621
9622 /* If the user changes the register disassembly style used for info
9623 register and other commands, we have to also switch the style used
9624 in opcodes for disassembly output. This function is run in the "set
9625 arm disassembly" command, and does that. */
9626
9627 static void
9628 set_disassembly_style_sfunc (const char *args, int from_tty,
9629 struct cmd_list_element *c)
9630 {
9631 /* Convert the short style name into the long style name (eg, reg-names-*)
9632 before calling the generic set_disassembler_options() function. */
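/* E.g. (assuming the usual short style names such as "std"): a style of
   "std" becomes the disassembler option "reg-names-std". */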
9633 std::string long_name = std::string ("reg-names-") + disassembly_style;
9634 set_disassembler_options (&long_name[0]);
9635 }
9636
9637 static void
9638 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
9639 struct cmd_list_element *c, const char *value)
9640 {
9641 struct gdbarch *gdbarch = get_current_arch ();
9642 const char *options = get_disassembler_options (gdbarch);
9643 const char *style = "";
9644 int len = 0;
9645 const char *opt;
9646
9647 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
9648 if (startswith (opt, "reg-names-"))
9649 {
9650 style = &opt[strlen ("reg-names-")];
9651 len = strcspn (style, ",");
9652 }
9653
9654 gdb_printf (file, "The disassembly style is \"%.*s\".\n", len, style);
9655 }
9656 \f
9657 /* Return the ARM register name corresponding to register I. */
9658 static const char *
9659 arm_register_name (struct gdbarch *gdbarch, int i)
9660 {
9661 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9662
9663 if (is_s_pseudo (gdbarch, i))
9664 {
9665 static const char *const s_pseudo_names[] = {
9666 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9667 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9668 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9669 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9670 };
9671
9672 return s_pseudo_names[i - tdep->s_pseudo_base];
9673 }
9674
9675 if (is_q_pseudo (gdbarch, i))
9676 {
9677 static const char *const q_pseudo_names[] = {
9678 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9679 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9680 };
9681
9682 return q_pseudo_names[i - tdep->q_pseudo_base];
9683 }
9684
9685 if (is_mve_pseudo (gdbarch, i))
9686 return "p0";
9687
9688 /* RA_AUTH_CODE is used for unwinding only. Do not assign it a name. */
9689 if (is_pacbti_pseudo (gdbarch, i))
9690 return "";
9691
9692 if (i >= ARRAY_SIZE (arm_register_names))
9693 /* These registers are only supported on targets which supply
9694 an XML description. */
9695 return "";
9696
9697 /* Non-pseudo registers. */
9698 return arm_register_names[i];
9699 }
9700
9701 /* Test whether the coff symbol specific value corresponds to a Thumb
9702 function. */
9703
9704 static int
9705 coff_sym_is_thumb (int val)
9706 {
9707 return (val == C_THUMBEXT
9708 || val == C_THUMBSTAT
9709 || val == C_THUMBEXTFUNC
9710 || val == C_THUMBSTATFUNC
9711 || val == C_THUMBLABEL);
9712 }
9713
9714 /* arm_coff_make_msymbol_special()
9715 arm_elf_make_msymbol_special()
9716
9717 These functions test whether the COFF or ELF symbol corresponds to
9718 an address in thumb code, and set a "special" bit in a minimal
9719 symbol to indicate that it does. */
9720
9721 static void
9722 arm_elf_make_msymbol_special (const asymbol *sym, struct minimal_symbol *msym)
9723 {
9724 const elf_symbol_type *elfsym = (const elf_symbol_type *) sym;
9725
9726 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
9727 == ST_BRANCH_TO_THUMB)
9728 MSYMBOL_SET_SPECIAL (msym);
9729 }
9730
9731 static void
9732 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9733 {
9734 if (coff_sym_is_thumb (val))
9735 MSYMBOL_SET_SPECIAL (msym);
9736 }
9737
9738 static void
9739 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9740 const asymbol *sym)
9741 {
9742 const char *name = bfd_asymbol_name (sym);
9743 struct arm_per_bfd *data;
9744 struct arm_mapping_symbol new_map_sym;
9745
9746 gdb_assert (name[0] == '$');
9747 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9748 return;
9749
9750 data = arm_bfd_data_key.get (objfile->obfd.get ());
9751 if (data == NULL)
9752 data = arm_bfd_data_key.emplace (objfile->obfd.get (),
9753 objfile->obfd->section_count);
9754 arm_mapping_symbol_vec &map
9755 = data->section_maps[bfd_asymbol_section (sym)->index];
9756
9757 new_map_sym.value = sym->value;
9758 new_map_sym.type = name[1];
9759
9760 /* Insert at the end; the vector will be sorted on first use. */
9761 map.push_back (new_map_sym);
9762 }
9763
9764 static void
9765 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9766 {
9767 struct gdbarch *gdbarch = regcache->arch ();
9768 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9769
9770 /* If necessary, set the T bit. */
9771 if (arm_apcs_32)
9772 {
9773 ULONGEST val, t_bit;
9774 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9775 t_bit = arm_psr_thumb_bit (gdbarch);
9776 if (arm_pc_is_thumb (gdbarch, pc))
9777 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9778 val | t_bit);
9779 else
9780 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9781 val & ~t_bit);
9782 }
9783 }
9784
9785 /* Read the contents of a NEON quad register, by reading from two
9786 double registers. This is used to implement the quad pseudo
9787 registers, and for argument passing in case the quad registers are
9788 missing; vectors are passed in quad registers when using the VFP
9789 ABI, even if a NEON unit is not present. REGNUM is the index of
9790 the quad register, in [0, 15]. */
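/* Quad register q<N> overlays double registers d<2N> (low half) and
   d<2N+1> (high half); the "regnum << 1" mapping below relies on this. */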
9791
9792 static enum register_status
9793 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9794 int regnum, gdb_byte *buf)
9795 {
9796 char name_buf[4];
9797 gdb_byte reg_buf[8];
9798 int double_regnum;
9799 enum register_status status;
9800
9801 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9802 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9803 strlen (name_buf));
9804
9805 status = regcache->raw_read (double_regnum, reg_buf);
9806 if (status != REG_VALID)
9807 return status;
9808 memcpy (buf, reg_buf, 8);
9809
9810 status = regcache->raw_read (double_regnum + 1, reg_buf);
9811 if (status != REG_VALID)
9812 return status;
9813 memcpy (buf + 8, reg_buf, 8);
9814
9815 return REG_VALID;
9816 }
9817
9818 /* Read the contents of a NEON quad register, by reading from two double
9819 registers, and return it as a value. QUAD_REG_INDEX is the index of the quad
9820 register, in [0, 15]. */
9821
9822 static value *
9823 arm_neon_quad_read_value (gdbarch *gdbarch, const frame_info_ptr &next_frame,
9824 int pseudo_reg_num, int quad_reg_index)
9825 {
9826 std::string raw_reg_name = string_printf ("d%d", quad_reg_index << 1);
9827 int double_regnum
9828 = user_reg_map_name_to_regnum (gdbarch, raw_reg_name.c_str (),
9829 raw_reg_name.length ());
9830
9831 return pseudo_from_concat_raw (next_frame, pseudo_reg_num, double_regnum,
9832 double_regnum + 1);
9833 }
9834
9835 /* Read the contents of the MVE pseudo register REGNUM and return it as a
9836 value. */
9837 static value *
9838 arm_mve_pseudo_read_value (gdbarch *gdbarch, const frame_info_ptr &next_frame,
9839 int pseudo_reg_num)
9840 {
9841 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9842
9843 /* P0 is the first 16 bits of VPR. */
9844 return pseudo_from_raw_part (next_frame, pseudo_reg_num,
9845 tdep->mve_vpr_regnum, 0);
9846 }
9847
9848 static value *
9849 arm_pseudo_read_value (gdbarch *gdbarch, const frame_info_ptr &next_frame,
9850 const int pseudo_reg_num)
9851 {
9852 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9853
9854 gdb_assert (pseudo_reg_num >= gdbarch_num_regs (gdbarch));
9855
9856 if (is_q_pseudo (gdbarch, pseudo_reg_num))
9857 {
9858 /* Quad-precision register. */
9859 return arm_neon_quad_read_value (gdbarch, next_frame, pseudo_reg_num,
9860 pseudo_reg_num - tdep->q_pseudo_base);
9861 }
9862 else if (is_mve_pseudo (gdbarch, pseudo_reg_num))
9863 return arm_mve_pseudo_read_value (gdbarch, next_frame, pseudo_reg_num);
9864 else
9865 {
9866 int s_reg_index = pseudo_reg_num - tdep->s_pseudo_base;
9867
9868 /* Single-precision register. */
9869 gdb_assert (s_reg_index < 32);
9870
9871 /* s0 is always the least significant half of d0. */
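/* Concretely: on a little-endian target the even-numbered s register lives
   at offset 0 of its containing d register and the odd-numbered one at
   offset 4; on a big-endian target the offsets are swapped, which is what
   the test below implements. */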
9872 int offset;
9873 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9874 offset = (s_reg_index & 1) ? 0 : 4;
9875 else
9876 offset = (s_reg_index & 1) ? 4 : 0;
9877
9878 std::string raw_reg_name = string_printf ("d%d", s_reg_index >> 1);
9879 int double_regnum
9880 = user_reg_map_name_to_regnum (gdbarch, raw_reg_name.c_str (),
9881 raw_reg_name.length ());
9882
9883 return pseudo_from_raw_part (next_frame, pseudo_reg_num, double_regnum,
9884 offset);
9885 }
9886 }
9887
9888 /* Store the contents of BUF to a NEON quad register, by writing to
9889 two double registers. This is used to implement the quad pseudo
9890 registers, and for argument passing in case the quad registers are
9891 missing; vectors are passed in quad registers when using the VFP
9892 ABI, even if a NEON unit is not present. REGNUM is the index
9893 of the quad register, in [0, 15]. */
9894
9895 static void
9896 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9897 int regnum, const gdb_byte *buf)
9898 {
9899 char name_buf[4];
9900 int double_regnum;
9901
9902 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9903 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9904 strlen (name_buf));
9905
9906 regcache->raw_write (double_regnum, buf);
9907 regcache->raw_write (double_regnum + 1, buf + 8);
9908 }
9909
9910 static void
9911 arm_neon_quad_write (gdbarch *gdbarch, const frame_info_ptr &next_frame,
9912 int quad_reg_index, gdb::array_view<const gdb_byte> buf)
9913 {
9914 std::string raw_reg_name = string_printf ("d%d", quad_reg_index << 1);
9915 int double_regnum
9916 = user_reg_map_name_to_regnum (gdbarch, raw_reg_name.data (),
9917 raw_reg_name.length ());
9918
9919 pseudo_to_concat_raw (next_frame, buf, double_regnum, double_regnum + 1);
9920 }
9921
9922 /* Store the contents of BUF to the MVE pseudo register REGNUM. */
9923
9924 static void
9925 arm_mve_pseudo_write (gdbarch *gdbarch, const frame_info_ptr &next_frame,
9926 int pseudo_reg_num, gdb::array_view<const gdb_byte> buf)
9927 {
9928 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9929
9930 /* P0 is the first 16 bits of VPR. */
9931 pseudo_to_raw_part (next_frame, buf, tdep->mve_vpr_regnum, 0);
9932 }
9933
9934 static void
9935 arm_pseudo_write (gdbarch *gdbarch, const frame_info_ptr &next_frame,
9936 const int pseudo_reg_num,
9937 gdb::array_view<const gdb_byte> buf)
9938 {
9939 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9940
9941 gdb_assert (pseudo_reg_num >= gdbarch_num_regs (gdbarch));
9942
9943 if (is_q_pseudo (gdbarch, pseudo_reg_num))
9944 {
9945 /* Quad-precision register. */
9946 arm_neon_quad_write (gdbarch, next_frame,
9947 pseudo_reg_num - tdep->q_pseudo_base, buf);
9948 }
9949 else if (is_mve_pseudo (gdbarch, pseudo_reg_num))
9950 arm_mve_pseudo_write (gdbarch, next_frame, pseudo_reg_num, buf);
9951 else
9952 {
9953 int s_reg_index = pseudo_reg_num - tdep->s_pseudo_base;
9954
9955 /* Single-precision register. */
9956 gdb_assert (s_reg_index < 32);
9957
9958 /* s0 is always the least significant half of d0. */
9959 int offset;
9960 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9961 offset = (s_reg_index & 1) ? 0 : 4;
9962 else
9963 offset = (s_reg_index & 1) ? 4 : 0;
9964
9965 std::string raw_reg_name = string_printf ("d%d", s_reg_index >> 1);
9966 int double_regnum
9967 = user_reg_map_name_to_regnum (gdbarch, raw_reg_name.c_str (),
9968 raw_reg_name.length ());
9969
9970 pseudo_to_raw_part (next_frame, buf, double_regnum, offset);
9971 }
9972 }
9973
9974 static struct value *
9975 value_of_arm_user_reg (const frame_info_ptr &frame, const void *baton)
9976 {
9977 const int *reg_p = (const int *) baton;
9978 return value_of_register (*reg_p, get_next_frame_sentinel_okay (frame));
9979 }
9980
9981 static enum gdb_osabi
9982 arm_elf_osabi_sniffer (bfd *abfd)
9983 {
9984 unsigned int elfosabi;
9985 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9986
9987 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9988
9989 if (elfosabi == ELFOSABI_ARM)
9990 /* GNU tools use this value. Check note sections in this case,
9991 as well. */
9992 {
9993 for (asection *sect : gdb_bfd_sections (abfd))
9994 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
9995 }
9996
9997 /* Anything else will be handled by the generic ELF sniffer. */
9998 return osabi;
9999 }
10000
10001 static int
10002 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
10003 const struct reggroup *group)
10004 {
10005 /* FPS register's type is INT, but it belongs to float_reggroup. Besides
10006 this, the FPS register belongs to save_reggroup, restore_reggroup, and
10007 all_reggroup, of course. */
10008 if (regnum == ARM_FPS_REGNUM)
10009 return (group == float_reggroup
10010 || group == save_reggroup
10011 || group == restore_reggroup
10012 || group == all_reggroup);
10013 else
10014 return default_register_reggroup_p (gdbarch, regnum, group);
10015 }
10016
10017 /* For backward-compatibility we allow two 'g' packet lengths with
10018 the remote protocol depending on whether FPA registers are
10019 supplied. M-profile targets do not have FPA registers, but some
10020 stubs already exist in the wild which use a 'g' packet which
10021 supplies them albeit with dummy values. The packet format which
10022 includes FPA registers should be considered deprecated for
10023 M-profile targets. */
10024
10025 static void
10026 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
10027 {
10028 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10029
10030 if (tdep->is_m)
10031 {
10032 const target_desc *tdesc;
10033
10034 /* If we know from the executable this is an M-profile target,
10035 cater for remote targets whose register set layout is the
10036 same as the FPA layout. */
10037 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
10038 register_remote_g_packet_guess (gdbarch,
10039 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
10040 tdesc);
10041
10042 /* The regular M-profile layout. */
10043 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
10044 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
10045 tdesc);
10046
10047 /* M-profile plus M4F VFP. */
10048 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
10049 register_remote_g_packet_guess (gdbarch,
10050 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
10051 tdesc);
10052 /* M-profile plus MVE. */
10053 tdesc = arm_read_mprofile_description (ARM_M_TYPE_MVE);
10054 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE
10055 + ARM_VFP2_REGS_SIZE
10056 + ARM_INT_REGISTER_SIZE, tdesc);
10057
10058 /* M-profile system (stack pointers). */
10059 tdesc = arm_read_mprofile_description (ARM_M_TYPE_SYSTEM);
10060 register_remote_g_packet_guess (gdbarch, 2 * ARM_INT_REGISTER_SIZE, tdesc);
10061 }
10062
10063 /* Otherwise we don't have a useful guess. */
10064 }
10065
10066 /* Implement the code_of_frame_writable gdbarch method. */
10067
10068 static int
10069 arm_code_of_frame_writable (struct gdbarch *gdbarch, const frame_info_ptr &frame)
10070 {
10071 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10072
10073 if (tdep->is_m && get_frame_type (frame) == SIGTRAMP_FRAME)
10074 {
10075 /* M-profile exception frames return to some magic PCs, which
10076 aren't writable at all. */
10077 return 0;
10078 }
10079 else
10080 return 1;
10081 }
10082
10083 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
10084 to be postfixed by a version (eg armv7hl). */
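/* For example (illustrative matches only): the pattern below accepts a bare
   "arm" as well as versioned names such as "armv5tel" or "armv7hl". */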
10085
10086 static const char *
10087 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
10088 {
10089 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
10090 return "arm(v[^- ]*)?";
10091 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
10092 }
10093
10094 /* Implement the "get_pc_address_flags" gdbarch method. */
10095
10096 static std::string
10097 arm_get_pc_address_flags (const frame_info_ptr &frame, CORE_ADDR pc)
10098 {
10099 if (get_frame_pc_masked (frame))
10100 return "PAC";
10101
10102 return "";
10103 }
10104
10105 /* Initialize the current architecture based on INFO. If possible,
10106 reuse an architecture from ARCHES, which is a list of
10107 architectures already created during this debugging session.
10108
10109 Called e.g. at program startup, when reading a core file, and when
10110 reading a binary file. */
10111
10112 static struct gdbarch *
10113 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
10114 {
10115 struct gdbarch_list *best_arch;
10116 enum arm_abi_kind arm_abi = arm_abi_global;
10117 enum arm_float_model fp_model = arm_fp_model;
10118 tdesc_arch_data_up tdesc_data;
10119 int i;
10120 bool is_m = false;
10121 bool have_sec_ext = false;
10122 int vfp_register_count = 0;
10123 bool have_s_pseudos = false, have_q_pseudos = false;
10124 bool have_wmmx_registers = false;
10125 bool have_neon = false;
10126 bool have_fpa_registers = true;
10127 const struct target_desc *tdesc = info.target_desc;
10128 bool have_vfp = false;
10129 bool have_mve = false;
10130 bool have_pacbti = false;
10131 int mve_vpr_regnum = -1;
10132 int register_count = ARM_NUM_REGS;
10133 bool have_m_profile_msp = false;
10134 int m_profile_msp_regnum = -1;
10135 int m_profile_psp_regnum = -1;
10136 int m_profile_msp_ns_regnum = -1;
10137 int m_profile_psp_ns_regnum = -1;
10138 int m_profile_msp_s_regnum = -1;
10139 int m_profile_psp_s_regnum = -1;
10140 int tls_regnum = 0;
10141
10142 /* If we have an object to base this architecture on, try to determine
10143 its ABI. */
10144
10145 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
10146 {
10147 int ei_osabi, e_flags;
10148
10149 switch (bfd_get_flavour (info.abfd))
10150 {
10151 case bfd_target_coff_flavour:
10152 /* Assume it's an old APCS-style ABI. */
10153 /* XXX WinCE? */
10154 arm_abi = ARM_ABI_APCS;
10155 break;
10156
10157 case bfd_target_elf_flavour:
10158 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
10159 e_flags = elf_elfheader (info.abfd)->e_flags;
10160
10161 if (ei_osabi == ELFOSABI_ARM)
10162 {
10163 /* GNU tools used to use this value, but do not for EABI
10164 objects. There's nowhere to tag an EABI version
10165 anyway, so assume APCS. */
10166 arm_abi = ARM_ABI_APCS;
10167 }
10168 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
10169 {
10170 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
10171
10172 switch (eabi_ver)
10173 {
10174 case EF_ARM_EABI_UNKNOWN:
10175 /* Assume GNU tools. */
10176 arm_abi = ARM_ABI_APCS;
10177 break;
10178
10179 case EF_ARM_EABI_VER4:
10180 case EF_ARM_EABI_VER5:
10181 arm_abi = ARM_ABI_AAPCS;
10182 /* EABI binaries default to VFP float ordering.
10183 They may also contain build attributes that can
10184 be used to identify if the VFP argument-passing
10185 ABI is in use. */
10186 if (fp_model == ARM_FLOAT_AUTO)
10187 {
10188 #ifdef HAVE_ELF
10189 switch (bfd_elf_get_obj_attr_int (info.abfd,
10190 OBJ_ATTR_PROC,
10191 Tag_ABI_VFP_args))
10192 {
10193 case AEABI_VFP_args_base:
10194 /* "The user intended FP parameter/result
10195 passing to conform to AAPCS, base
10196 variant". */
10197 fp_model = ARM_FLOAT_SOFT_VFP;
10198 break;
10199 case AEABI_VFP_args_vfp:
10200 /* "The user intended FP parameter/result
10201 passing to conform to AAPCS, VFP
10202 variant". */
10203 fp_model = ARM_FLOAT_VFP;
10204 break;
10205 case AEABI_VFP_args_toolchain:
10206 /* "The user intended FP parameter/result
10207 passing to conform to tool chain-specific
10208 conventions" - we don't know any such
10209 conventions, so leave it as "auto". */
10210 break;
10211 case AEABI_VFP_args_compatible:
10212 /* "Code is compatible with both the base
10213 and VFP variants; the user did not permit
10214 non-variadic functions to pass FP
10215 parameters/results" - leave it as
10216 "auto". */
10217 break;
10218 default:
10219 /* Attribute value not mentioned in the
10220 November 2012 ABI, so leave it as
10221 "auto". */
10222 break;
10223 }
10224 #else
10225 fp_model = ARM_FLOAT_SOFT_VFP;
10226 #endif
10227 }
10228 break;
10229
10230 default:
10231 /* Leave it as "auto". */
10232 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10233 break;
10234 }
10235
10236 #ifdef HAVE_ELF
10237 /* Detect M-profile programs. This only works if the
10238 executable file includes build attributes; GCC does
10239 copy them to the executable, but e.g. RealView does
10240 not. */
10241 int attr_arch
10242 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10243 Tag_CPU_arch);
10244 int attr_profile
10245 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10246 Tag_CPU_arch_profile);
10247
10248 /* GCC specifies the profile for v6-M; RealView only
10249 specifies the profile for architectures starting with
10250 V7 (as opposed to architectures with a tag
10251 numerically greater than TAG_CPU_ARCH_V7). */
10252 if (!tdesc_has_registers (tdesc)
10253 && (attr_arch == TAG_CPU_ARCH_V6_M
10254 || attr_arch == TAG_CPU_ARCH_V6S_M
10255 || attr_arch == TAG_CPU_ARCH_V7E_M
10256 || attr_arch == TAG_CPU_ARCH_V8M_BASE
10257 || attr_arch == TAG_CPU_ARCH_V8M_MAIN
10258 || attr_arch == TAG_CPU_ARCH_V8_1M_MAIN
10259 || attr_profile == 'M'))
10260 is_m = true;
10261
10262 /* Look for attributes that indicate support for ARMv8.1-m
10263 PACBTI. */
10264 if (!tdesc_has_registers (tdesc) && is_m)
10265 {
10266 int attr_pac_extension
10267 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10268 Tag_PAC_extension);
10269
10270 int attr_bti_extension
10271 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10272 Tag_BTI_extension);
10273
10274 int attr_pacret_use
10275 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10276 Tag_PACRET_use);
10277
10278 int attr_bti_use
10279 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10280 Tag_BTI_use);
10281
10282 if (attr_pac_extension != 0 || attr_bti_extension != 0
10283 || attr_pacret_use != 0 || attr_bti_use != 0)
10284 have_pacbti = true;
10285 }
10286 #endif
10287 }
10288
10289 if (fp_model == ARM_FLOAT_AUTO)
10290 {
10291 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10292 {
10293 case 0:
10294 /* Leave it as "auto". Strictly speaking this case
10295 means FPA, but almost nobody uses that now, and
10296 many toolchains fail to set the appropriate bits
10297 for the floating-point model they use. */
10298 break;
10299 case EF_ARM_SOFT_FLOAT:
10300 fp_model = ARM_FLOAT_SOFT_FPA;
10301 break;
10302 case EF_ARM_VFP_FLOAT:
10303 fp_model = ARM_FLOAT_VFP;
10304 break;
10305 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10306 fp_model = ARM_FLOAT_SOFT_VFP;
10307 break;
10308 }
10309 }
10310
10311 if (e_flags & EF_ARM_BE8)
10312 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10313
10314 break;
10315
10316 default:
10317 /* Leave it as "auto". */
10318 break;
10319 }
10320 }
10321
10322 /* Check any target description for validity. */
10323 if (tdesc_has_registers (tdesc))
10324 {
10325 /* For most registers we require GDB's default names; but also allow
10326 the numeric names for sp / lr / pc, as a convenience. */
10327 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10328 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10329 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10330
10331 const struct tdesc_feature *feature;
10332 int valid_p;
10333
10334 feature = tdesc_find_feature (tdesc,
10335 "org.gnu.gdb.arm.core");
10336 if (feature == NULL)
10337 {
10338 feature = tdesc_find_feature (tdesc,
10339 "org.gnu.gdb.arm.m-profile");
10340 if (feature == NULL)
10341 return NULL;
10342 else
10343 is_m = true;
10344 }
10345
10346 tdesc_data = tdesc_data_alloc ();
10347
10348 valid_p = 1;
10349 for (i = 0; i < ARM_SP_REGNUM; i++)
10350 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10351 arm_register_names[i]);
10352 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10353 ARM_SP_REGNUM,
10354 arm_sp_names);
10355 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10356 ARM_LR_REGNUM,
10357 arm_lr_names);
10358 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10359 ARM_PC_REGNUM,
10360 arm_pc_names);
10361 if (is_m)
10362 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10363 ARM_PS_REGNUM, "xpsr");
10364 else
10365 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10366 ARM_PS_REGNUM, "cpsr");
10367
10368 if (!valid_p)
10369 return NULL;
10370
10371 if (is_m)
10372 {
10373 feature = tdesc_find_feature (tdesc,
10374 "org.gnu.gdb.arm.m-system");
10375 if (feature != nullptr)
10376 {
10377 /* MSP */
10378 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10379 register_count, "msp");
10380 if (!valid_p)
10381 {
10382 warning (_("M-profile m-system feature is missing required register msp."));
10383 return nullptr;
10384 }
10385 have_m_profile_msp = true;
10386 m_profile_msp_regnum = register_count++;
10387
10388 /* PSP */
10389 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10390 register_count, "psp");
10391 if (!valid_p)
10392 {
10393 warning (_("M-profile m-system feature is missing required register psp."));
10394 return nullptr;
10395 }
10396 m_profile_psp_regnum = register_count++;
10397 }
10398 }
10399
10400 feature = tdesc_find_feature (tdesc,
10401 "org.gnu.gdb.arm.fpa");
10402 if (feature != NULL)
10403 {
10404 valid_p = 1;
10405 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10406 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10407 arm_register_names[i]);
10408 if (!valid_p)
10409 return NULL;
10410 }
10411 else
10412 have_fpa_registers = false;
10413
10414 feature = tdesc_find_feature (tdesc,
10415 "org.gnu.gdb.xscale.iwmmxt");
10416 if (feature != NULL)
10417 {
10418 static const char *const iwmmxt_names[] = {
10419 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10420 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10421 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10422 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10423 };
10424
10425 valid_p = 1;
10426 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10427 valid_p
10428 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10429 iwmmxt_names[i - ARM_WR0_REGNUM]);
10430
10431 /* Check for the control registers, but do not fail if they
10432 are missing. */
10433 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10434 tdesc_numbered_register (feature, tdesc_data.get (), i,
10435 iwmmxt_names[i - ARM_WR0_REGNUM]);
10436
10437 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10438 valid_p
10439 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10440 iwmmxt_names[i - ARM_WR0_REGNUM]);
10441
10442 if (!valid_p)
10443 return NULL;
10444
10445 have_wmmx_registers = true;
10446 }
10447
10448 /* If we have a VFP unit, check whether the single precision registers
10449 are present. If not, then we will synthesize them as pseudo
10450 registers. */
10451 feature = tdesc_find_feature (tdesc,
10452 "org.gnu.gdb.arm.vfp");
10453 if (feature != NULL)
10454 {
10455 static const char *const vfp_double_names[] = {
10456 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10457 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10458 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10459 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10460 };
10461
10462 /* Require the double precision registers. There must be either
10463 16 or 32. */
10464 valid_p = 1;
10465 for (i = 0; i < 32; i++)
10466 {
10467 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10468 ARM_D0_REGNUM + i,
10469 vfp_double_names[i]);
10470 if (!valid_p)
10471 break;
10472 }
10473 if (!valid_p && i == 16)
10474 valid_p = 1;
10475
10476 /* Also require FPSCR. */
10477 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10478 ARM_FPSCR_REGNUM, "fpscr");
10479 if (!valid_p)
10480 return NULL;
10481
10482 have_vfp = true;
10483
10484 if (tdesc_unnumbered_register (feature, "s0") == 0)
10485 have_s_pseudos = true;
10486
10487 vfp_register_count = i;
10488
10489 /* If we have VFP, also check for NEON. The architecture allows
10490 NEON without VFP (integer vector operations only), but GDB
10491 does not support that. */
10492 feature = tdesc_find_feature (tdesc,
10493 "org.gnu.gdb.arm.neon");
10494 if (feature != NULL)
10495 {
10496 /* NEON requires 32 double-precision registers. */
10497 if (i != 32)
10498 return NULL;
10499
10500 /* If there are quad registers defined by the stub, use
10501 their type; otherwise (normally) provide them with
10502 the default type. */
10503 if (tdesc_unnumbered_register (feature, "q0") == 0)
10504 have_q_pseudos = true;
10505 }
10506 }
10507
10508 /* Check for the TLS register feature. */
10509 feature = tdesc_find_feature (tdesc, "org.gnu.gdb.arm.tls");
10510 if (feature != nullptr)
10511 {
10512 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10513 register_count, "tpidruro");
10514 if (!valid_p)
10515 return nullptr;
10516
10517 tls_regnum = register_count;
10518 register_count++;
10519 }
10520
10521 /* Check for MVE after all the checks for GPR's, VFP and Neon.
10522 MVE (Helium) is an M-profile extension. */
10523 if (is_m)
10524 {
10525 /* Do we have the MVE feature? */
10526 feature = tdesc_find_feature (tdesc,"org.gnu.gdb.arm.m-profile-mve");
10527
10528 if (feature != nullptr)
10529 {
10530 /* If we have MVE, we must always have the VPR register. */
10531 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10532 register_count, "vpr");
10533 if (!valid_p)
10534 {
10535 warning (_("MVE feature is missing required register vpr."));
10536 return nullptr;
10537 }
10538
10539 have_mve = true;
10540 mve_vpr_regnum = register_count;
10541 register_count++;
10542
10543 /* We can't have Q pseudo registers available here, as that
10544 would mean we have NEON features, and that is only available
10545 on A and R profiles. */
10546 gdb_assert (!have_q_pseudos);
10547
10548 /* Given we have an M-profile target description, if MVE is
10549 enabled and there are VFP registers, we should have Q
10550 pseudo registers (Q0 ~ Q7). */
10551 if (have_vfp)
10552 have_q_pseudos = true;
10553 }
10554
10555 /* Do we have the ARMv8.1-m PACBTI feature? */
10556 feature = tdesc_find_feature (tdesc,
10557 "org.gnu.gdb.arm.m-profile-pacbti");
10558 if (feature != nullptr)
10559 {
10560 /* By advertising this feature, the target acknowledges the
10561 presence of the ARMv8.1-m PACBTI extensions.
10562
10563 We don't care for any particular registers in this group, so
10564 the target is free to include whatever it deems appropriate.
10565
10566 The expectation is for this feature to include the PAC
10567 keys. */
10568 have_pacbti = true;
10569 }
10570
10571 /* Do we have the Security extension? */
10572 feature = tdesc_find_feature (tdesc,
10573 "org.gnu.gdb.arm.secext");
10574 if (feature != nullptr)
10575 {
10576 /* Secure/Non-secure stack pointers. */
10577 /* MSP_NS */
10578 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10579 register_count, "msp_ns");
10580 if (!valid_p)
10581 {
10582 warning (_("M-profile secext feature is missing required register msp_ns."));
10583 return nullptr;
10584 }
10585 m_profile_msp_ns_regnum = register_count++;
10586
10587 /* PSP_NS */
10588 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10589 register_count, "psp_ns");
10590 if (!valid_p)
10591 {
10592 warning (_("M-profile secext feature is missing required register psp_ns."));
10593 return nullptr;
10594 }
10595 m_profile_psp_ns_regnum = register_count++;
10596
10597 /* MSP_S */
10598 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10599 register_count, "msp_s");
10600 if (!valid_p)
10601 {
10602 warning (_("M-profile secext feature is missing required register msp_s."));
10603 return nullptr;
10604 }
10605 m_profile_msp_s_regnum = register_count++;
10606
10607 /* PSP_S */
10608 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10609 register_count, "psp_s");
10610 if (!valid_p)
10611 {
10612 warning (_("M-profile secext feature is missing required register psp_s."));
10613 return nullptr;
10614 }
10615 m_profile_psp_s_regnum = register_count++;
10616
10617 have_sec_ext = true;
10618 }
10619
10620 }
10621 }
10622
10623 /* If there is already a candidate, use it. */
10624 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10625 best_arch != NULL;
10626 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10627 {
10628 arm_gdbarch_tdep *tdep
10629 = gdbarch_tdep<arm_gdbarch_tdep> (best_arch->gdbarch);
10630
10631 if (arm_abi != ARM_ABI_AUTO && arm_abi != tdep->arm_abi)
10632 continue;
10633
10634 if (fp_model != ARM_FLOAT_AUTO && fp_model != tdep->fp_model)
10635 continue;
10636
10637 /* There are various other properties in tdep that we do not
10638 need to check here: those derived from a target description,
10639 since gdbarches with a different target description are
10640 automatically disqualified. */
10641
10642 /* Do check is_m, though, since it might come from the binary. */
10643 if (is_m != tdep->is_m)
10644 continue;
10645
10646 /* Also check for ARMv8.1-m PACBTI support, since it might come from
10647 the binary. */
10648 if (have_pacbti != tdep->have_pacbti)
10649 continue;
10650
10651 /* Found a match. */
10652 break;
10653 }
10654
10655 if (best_arch != NULL)
10656 return best_arch->gdbarch;
10657
10658 gdbarch *gdbarch
10659 = gdbarch_alloc (&info, gdbarch_tdep_up (new arm_gdbarch_tdep));
10660 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10661
10662 /* Record additional information about the architecture we are defining.
10663 These are gdbarch discriminators, like the OSABI. */
10664 tdep->arm_abi = arm_abi;
10665 tdep->fp_model = fp_model;
10666 tdep->is_m = is_m;
10667 tdep->have_sec_ext = have_sec_ext;
10668 tdep->have_fpa_registers = have_fpa_registers;
10669 tdep->have_wmmx_registers = have_wmmx_registers;
10670 gdb_assert (vfp_register_count == 0
10671 || vfp_register_count == 16
10672 || vfp_register_count == 32);
10673 tdep->vfp_register_count = vfp_register_count;
10674 tdep->have_s_pseudos = have_s_pseudos;
10675 tdep->have_q_pseudos = have_q_pseudos;
10676 tdep->have_neon = have_neon;
10677 tdep->tls_regnum = tls_regnum;
10678
10679 /* Adjust the MVE feature settings. */
10680 if (have_mve)
10681 {
10682 tdep->have_mve = true;
10683 tdep->mve_vpr_regnum = mve_vpr_regnum;
10684 }
10685
10686 /* Adjust the PACBTI feature settings. */
10687 tdep->have_pacbti = have_pacbti;
10688
10689 /* Adjust the M-profile stack pointers settings. */
10690 if (have_m_profile_msp)
10691 {
10692 tdep->m_profile_msp_regnum = m_profile_msp_regnum;
10693 tdep->m_profile_psp_regnum = m_profile_psp_regnum;
10694 tdep->m_profile_msp_ns_regnum = m_profile_msp_ns_regnum;
10695 tdep->m_profile_psp_ns_regnum = m_profile_psp_ns_regnum;
10696 tdep->m_profile_msp_s_regnum = m_profile_msp_s_regnum;
10697 tdep->m_profile_psp_s_regnum = m_profile_psp_s_regnum;
10698 }
10699
10700 arm_register_g_packet_guesses (gdbarch);
10701
10702 /* Breakpoints. */
10703 switch (info.byte_order_for_code)
10704 {
10705 case BFD_ENDIAN_BIG:
10706 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10707 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10708 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10709 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10710
10711 break;
10712
10713 case BFD_ENDIAN_LITTLE:
10714 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10715 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10716 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10717 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10718
10719 break;
10720
10721 default:
10722 internal_error (_("arm_gdbarch_init: bad byte order for float format"));
10723 }
10724
10725 /* On ARM targets char defaults to unsigned. */
10726 set_gdbarch_char_signed (gdbarch, 0);
10727
10728 /* wchar_t is unsigned under the AAPCS. */
10729 if (tdep->arm_abi == ARM_ABI_AAPCS)
10730 set_gdbarch_wchar_signed (gdbarch, 0);
10731 else
10732 set_gdbarch_wchar_signed (gdbarch, 1);
10733
10734 /* Compute type alignment. */
10735 set_gdbarch_type_align (gdbarch, arm_type_align);
10736
10737 /* Note: for displaced stepping, this includes the breakpoint, and one word
10738 of additional scratch space. This setting isn't used for anything besides
10739 displaced stepping at present. */
10740 set_gdbarch_displaced_step_buffer_length
10741 (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
10742 set_gdbarch_max_insn_length (gdbarch, 4);
10743
10744 /* This should be low enough for everything. */
10745 tdep->lowest_pc = 0x20;
10746 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10747
10748 /* The default, for both APCS and AAPCS, is to return small
10749 structures in registers. */
10750 tdep->struct_return = reg_struct_return;
10751
10752 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10753 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10754
10755 if (is_m)
10756 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
10757
10758 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10759
10760 frame_base_set_default (gdbarch, &arm_normal_base);
10761
10762 /* Address manipulation. */
10763 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10764
10765 /* Advance PC across function entry code. */
10766 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10767
10768 /* Detect whether PC is at a point where the stack has been destroyed. */
10769 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
10770
10771 /* Skip trampolines. */
10772 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10773
10774 /* The stack grows downward. */
10775 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10776
10777 /* Breakpoint manipulation. */
10778 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
10779 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
10780 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
10781 arm_breakpoint_kind_from_current_state);
10782
10783 /* Information about registers, etc. */
10784 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10785 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10786 set_gdbarch_num_regs (gdbarch, register_count);
10787 set_gdbarch_register_type (gdbarch, arm_register_type);
10788 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10789
10790 /* This "info float" is FPA-specific. Use the generic version if we
10791 do not have FPA. */
10792 if (tdep->have_fpa_registers)
10793 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10794
10795 /* Internal <-> external register number maps. */
10796 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10797 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10798
10799 set_gdbarch_register_name (gdbarch, arm_register_name);
10800
10801 /* Returning results. */
10802 set_gdbarch_return_value_as_value (gdbarch, arm_return_value);
10803
10804 /* Disassembly. */
10805 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10806
10807 /* Minsymbol frobbing. */
10808 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10809 set_gdbarch_coff_make_msymbol_special (gdbarch,
10810 arm_coff_make_msymbol_special);
10811 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10812
10813 /* Thumb-2 IT block support. */
10814 set_gdbarch_adjust_breakpoint_address (gdbarch,
10815 arm_adjust_breakpoint_address);
10816
10817 /* Virtual tables. */
10818 set_gdbarch_vbit_in_delta (gdbarch, 1);
10819
10820 /* Hook in the ABI-specific overrides, if they have been registered. */
10821 gdbarch_init_osabi (info, gdbarch);
10822
10823 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10824
10825 /* Add some default predicates. */
10826 if (is_m)
10827 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10828 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10829 dwarf2_append_unwinders (gdbarch);
10830 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10831 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
10832 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10833
10834 /* Now we have tuned the configuration, set a few final things,
10835 based on what the OS ABI has told us. */
10836
10837 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10838 binaries are always marked. */
10839 if (tdep->arm_abi == ARM_ABI_AUTO)
10840 tdep->arm_abi = ARM_ABI_APCS;
10841
10842 /* Watchpoints are not steppable. */
10843 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10844
10845 /* We used to default to FPA for generic ARM, but almost nobody
10846 uses that now, and we now provide a way for the user to force
10847 the model. So default to the most useful variant. */
10848 if (tdep->fp_model == ARM_FLOAT_AUTO)
10849 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10850
10851 if (tdep->jb_pc >= 0)
10852 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10853
10854 /* Floating point sizes and format. */
10855 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10856 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10857 {
10858 set_gdbarch_double_format
10859 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10860 set_gdbarch_long_double_format
10861 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10862 }
10863 else
10864 {
10865 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10866 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10867 }
10868
10869 /* Hook used to decorate frames with signed return addresses, only available
10870 for ARMv8.1-m PACBTI. */
10871 if (is_m && have_pacbti)
10872 set_gdbarch_get_pc_address_flags (gdbarch, arm_get_pc_address_flags);
10873
10874 if (tdesc_data != nullptr)
10875 {
10876 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10877
10878 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
10879 register_count = gdbarch_num_regs (gdbarch);
10880
10881 /* Override tdesc_register_type to adjust the types of VFP
10882 registers for NEON. */
10883 set_gdbarch_register_type (gdbarch, arm_register_type);
10884 }
10885
10886 /* Initialize the pseudo register data. */
10887 int num_pseudos = 0;
10888 if (tdep->have_s_pseudos)
10889 {
10890 /* VFP single precision pseudo registers (S0~S31). */
10891 tdep->s_pseudo_base = register_count;
10892 tdep->s_pseudo_count = 32;
10893 num_pseudos += tdep->s_pseudo_count;
10894
10895 if (tdep->have_q_pseudos)
10896 {
10897 /* NEON quad precision pseudo registers (Q0~Q15). */
10898 tdep->q_pseudo_base = register_count + num_pseudos;
10899
10900 if (have_neon)
10901 tdep->q_pseudo_count = 16;
10902 else if (have_mve)
10903 tdep->q_pseudo_count = ARM_MVE_NUM_Q_REGS;
10904
10905 num_pseudos += tdep->q_pseudo_count;
10906 }
10907 }
10908
10909 /* Do we have any MVE pseudo registers? */
10910 if (have_mve)
10911 {
10912 tdep->mve_pseudo_base = register_count + num_pseudos;
10913 tdep->mve_pseudo_count = 1;
10914 num_pseudos += tdep->mve_pseudo_count;
10915 }
10916
10917 /* Do we have any ARMv8.1-m PACBTI pseudo registers? */
10918 if (have_pacbti)
10919 {
10920 tdep->pacbti_pseudo_base = register_count + num_pseudos;
10921 tdep->pacbti_pseudo_count = 1;
10922 num_pseudos += tdep->pacbti_pseudo_count;
10923 }
10924
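  /* Note: any pseudo registers allocated above are laid out contiguously
     after the raw registers: the S pseudos first, then the Q pseudos,
     then the MVE VPR pseudo, then the PACBTI pseudo.  */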
10925 /* Set some pseudo register hooks, if we have pseudo registers. */
10926 if (tdep->have_s_pseudos || have_mve || have_pacbti)
10927 {
10928 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10929 set_gdbarch_pseudo_register_read_value (gdbarch, arm_pseudo_read_value);
10930 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10931 }
10932
10933 /* Add standard register aliases. We add aliases even for those
10934 names which are used by the current architecture - it's simpler,
10935 and does no harm, since nothing ever lists user registers. */
10936 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10937 user_reg_add (gdbarch, arm_register_aliases[i].name,
10938 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10939
10940 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
10941 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
10942
10943 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
10944
10945 return gdbarch;
10946 }
10947
10948 static void
10949 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10950 {
10951 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10952
10953 if (tdep == NULL)
10954 return;
10955
10956 gdb_printf (file, _("arm_dump_tdep: fp_model = %i\n"),
10957 (int) tdep->fp_model);
10958 gdb_printf (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
10959 (int) tdep->have_fpa_registers);
10960 gdb_printf (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
10961 (int) tdep->have_wmmx_registers);
10962 gdb_printf (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
10963 (int) tdep->vfp_register_count);
10964 gdb_printf (file, _("arm_dump_tdep: have_s_pseudos = %s\n"),
10965 tdep->have_s_pseudos ? "true" : "false");
10966 gdb_printf (file, _("arm_dump_tdep: s_pseudo_base = %i\n"),
10967 (int) tdep->s_pseudo_base);
10968 gdb_printf (file, _("arm_dump_tdep: s_pseudo_count = %i\n"),
10969 (int) tdep->s_pseudo_count);
10970 gdb_printf (file, _("arm_dump_tdep: have_q_pseudos = %s\n"),
10971 tdep->have_q_pseudos ? "true" : "false");
10972 gdb_printf (file, _("arm_dump_tdep: q_pseudo_base = %i\n"),
10973 (int) tdep->q_pseudo_base);
10974 gdb_printf (file, _("arm_dump_tdep: q_pseudo_count = %i\n"),
10975 (int) tdep->q_pseudo_count);
10976 gdb_printf (file, _("arm_dump_tdep: have_neon = %i\n"),
10977 (int) tdep->have_neon);
10978 gdb_printf (file, _("arm_dump_tdep: have_mve = %s\n"),
10979 tdep->have_mve ? "yes" : "no");
10980 gdb_printf (file, _("arm_dump_tdep: mve_vpr_regnum = %i\n"),
10981 tdep->mve_vpr_regnum);
10982 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_base = %i\n"),
10983 tdep->mve_pseudo_base);
10984 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_count = %i\n"),
10985 tdep->mve_pseudo_count);
10986 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_regnum = %i\n"),
10987 tdep->m_profile_msp_regnum);
10988 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_regnum = %i\n"),
10989 tdep->m_profile_psp_regnum);
10990 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_ns_regnum = %i\n"),
10991 tdep->m_profile_msp_ns_regnum);
10992 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_ns_regnum = %i\n"),
10993 tdep->m_profile_psp_ns_regnum);
10994 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_s_regnum = %i\n"),
10995 tdep->m_profile_msp_s_regnum);
10996 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_s_regnum = %i\n"),
10997 tdep->m_profile_psp_s_regnum);
10998 gdb_printf (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
10999 (unsigned long) tdep->lowest_pc);
11000 gdb_printf (file, _("arm_dump_tdep: have_pacbti = %s\n"),
11001 tdep->have_pacbti ? "yes" : "no");
11002 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_base = %i\n"),
11003 tdep->pacbti_pseudo_base);
11004 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_count = %i\n"),
11005 tdep->pacbti_pseudo_count);
11006 gdb_printf (file, _("arm_dump_tdep: is_m = %s\n"),
11007 tdep->is_m ? "yes" : "no");
11008 }
11009
11010 #if GDB_SELF_TEST
11011 namespace selftests
11012 {
11013 static void arm_record_test (void);
11014 static void arm_analyze_prologue_test ();
11015 }
11016 #endif
11017
11018 INIT_GDB_FILE (arm_tdep)
11019 {
11020 long length;
11021 int i, j;
11022 char regdesc[1024], *rdptr = regdesc;
11023 size_t rest = sizeof (regdesc);
11024
11025 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
11026
11027 /* Add ourselves to objfile event chain. */
11028 gdb::observers::new_objfile.attach (arm_exidx_new_objfile, "arm-tdep");
11029
11030 /* Register an ELF OS ABI sniffer for ARM binaries. */
11031 gdbarch_register_osabi_sniffer (bfd_arch_arm,
11032 bfd_target_elf_flavour,
11033 arm_elf_osabi_sniffer);
11034
11035 /* Add root prefix command for all "set arm"/"show arm" commands. */
11036 add_setshow_prefix_cmd ("arm", no_class,
11037 _("Various ARM-specific commands."),
11038 _("Various ARM-specific commands."),
11039 &setarmcmdlist, &showarmcmdlist,
11040 &setlist, &showlist);
11041
11042 arm_disassembler_options = "reg-names-std";
11043 const disasm_options_t *disasm_options
11044 = &disassembler_options_arm ()->options;
11045 int num_disassembly_styles = 0;
11046 for (i = 0; disasm_options->name[i] != NULL; i++)
11047 if (startswith (disasm_options->name[i], "reg-names-"))
11048 num_disassembly_styles++;
11049
11050 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
11051 valid_disassembly_styles = XNEWVEC (const char *,
11052 num_disassembly_styles + 1);
11053 for (i = j = 0; disasm_options->name[i] != NULL; i++)
11054 if (startswith (disasm_options->name[i], "reg-names-"))
11055 {
11056 size_t offset = strlen ("reg-names-");
11057 const char *style = disasm_options->name[i];
11058 valid_disassembly_styles[j++] = &style[offset];
11059 if (strcmp (&style[offset], "std") == 0)
11060 disassembly_style = &style[offset];
11061 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
11062 disasm_options->description[i]);
11063 rdptr += length;
11064 rest -= length;
11065 }
11066 /* Mark the end of valid options. */
11067 valid_disassembly_styles[num_disassembly_styles] = NULL;
11068
11069 /* Create the help text. */
11070 std::string helptext = string_printf ("%s%s%s",
11071 _("The valid values are:\n"),
11072 regdesc,
11073 _("The default is \"std\"."));
11074
11075 add_setshow_enum_cmd ("disassembler", no_class,
11076 valid_disassembly_styles, &disassembly_style,
11077 _("Set the disassembly style."),
11078 _("Show the disassembly style."),
11079 helptext.c_str (),
11080 set_disassembly_style_sfunc,
11081 show_disassembly_style_sfunc,
11082 &setarmcmdlist, &showarmcmdlist);
11083
11084 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
11085 _("Set usage of ARM 32-bit mode."),
11086 _("Show usage of ARM 32-bit mode."),
11087 _("When off, a 26-bit PC will be used."),
11088 NULL,
11089 NULL, /* FIXME: i18n: Usage of ARM 32-bit
11090 mode is %s. */
11091 &setarmcmdlist, &showarmcmdlist);
11092
11093 /* Add a command to allow the user to force the FPU model. */
11094 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
11095 _("Set the floating point type."),
11096 _("Show the floating point type."),
11097 _("auto - Determine the FP typefrom the OS-ABI.\n\
11098 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
11099 fpa - FPA co-processor (GCC compiled).\n\
11100 softvfp - Software FP with pure-endian doubles.\n\
11101 vfp - VFP co-processor."),
11102 set_fp_model_sfunc, show_fp_model,
11103 &setarmcmdlist, &showarmcmdlist);
11104
11105 /* Add a command to allow the user to force the ABI. */
11106 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
11107 _("Set the ABI."),
11108 _("Show the ABI."),
11109 NULL, arm_set_abi, arm_show_abi,
11110 &setarmcmdlist, &showarmcmdlist);
11111
11112 /* Add two commands to allow the user to force the assumed
11113 execution mode. */
11114 add_setshow_enum_cmd ("fallback-mode", class_support,
11115 arm_mode_strings, &arm_fallback_mode_string,
11116 _("Set the mode assumed when symbols are unavailable."),
11117 _("Show the mode assumed when symbols are unavailable."),
11118 NULL, NULL, arm_show_fallback_mode,
11119 &setarmcmdlist, &showarmcmdlist);
11120 add_setshow_enum_cmd ("force-mode", class_support,
11121 arm_mode_strings, &arm_force_mode_string,
11122 _("Set the mode assumed even when symbols are available."),
11123 _("Show the mode assumed even when symbols are available."),
11124 NULL, NULL, arm_show_force_mode,
11125 &setarmcmdlist, &showarmcmdlist);
11126
11127 /* Add a command to stop triggering security exceptions when
11128 unwinding exception stacks. */
11129 add_setshow_boolean_cmd ("unwind-secure-frames", no_class, &arm_unwind_secure_frames,
11130 _("Set usage of non-secure to secure exception stack unwinding."),
11131 _("Show usage of non-secure to secure exception stack unwinding."),
11132 _("When on, the debugger can trigger memory access traps."),
11133 NULL, arm_show_unwind_secure_frames,
11134 &setarmcmdlist, &showarmcmdlist);
11135
11136 /* Debugging flag. */
11137 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
11138 _("Set ARM debugging."),
11139 _("Show ARM debugging."),
11140 _("When on, arm-specific debugging is enabled."),
11141 NULL,
11142 NULL, /* FIXME: i18n: "ARM debugging is %s." */
11143 &setdebuglist, &showdebuglist);
11144
11145 #if GDB_SELF_TEST
11146 selftests::register_test ("arm-record", selftests::arm_record_test);
11147 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test);
11148 #endif
11149
11150 }
11151
11152 /* ARM-reversible process record data structures. */
11153
11154 #define ARM_INSN_SIZE_BYTES 4
11155 #define THUMB_INSN_SIZE_BYTES 2
11156 #define THUMB2_INSN_SIZE_BYTES 4
11157
11158
11159 /* Position of the bit within a 32-bit ARM instruction
11160 that defines whether the instruction is a load or store. */
11161 #define INSN_S_L_BIT_NUM 20
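/* In the load/store encodings decoded below, this bit (bit 20) being set
   means a load and being clear means a store.  */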
11162
11163 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
11164 do \
11165 { \
11166 unsigned int reg_len = LENGTH; \
11167 if (reg_len) \
11168 { \
11169 REGS = XNEWVEC (uint32_t, reg_len); \
11170 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
11171 } \
11172 } \
11173 while (0)
11174
11175 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
11176 do \
11177 { \
11178 unsigned int mem_len = LENGTH; \
11179 if (mem_len) \
11180 { \
11181 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
11182 memcpy(&MEMS->len, &RECORD_BUF[0], \
11183 sizeof(struct arm_mem_r) * LENGTH); \
11184 } \
11185 } \
11186 while (0)
11187
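/* These helpers copy the scratch arrays filled in by the decode routines:
   REG_ALLOC copies LENGTH register numbers out of RECORD_BUF, and MEM_ALLOC
   copies LENGTH (length, address) uint32_t pairs out of RECORD_BUF into an
   array of struct arm_mem_r (defined below).  */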
11188 /* Checks whether the insn has already been recorded, i.e. has register
   or memory records attached (boolean expression). */
11189 #define INSN_RECORDED(ARM_RECORD) \
11190 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
11191
11192 /* ARM memory record structure. */
11193 struct arm_mem_r
11194 {
11195 uint32_t len; /* Record length. */
11196 uint32_t addr; /* Memory address. */
11197 };
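/* For example, a decode routine records a 4-byte store to ADDR by placing
   the pair { 4, ADDR } in its record_buf_mem scratch array; MEM_ALLOC then
   turns that pair into one arm_mem_r entry.  */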
11198
11199 /* ARM instruction record contains opcode of current insn
11200 and execution state (before entry to decode_insn()),
11201 contains list of to-be-modified registers and
11202 memory blocks (on return from decode_insn()). */
11203
11204 struct arm_insn_decode_record
11205 {
11206 struct gdbarch *gdbarch;
11207 struct regcache *regcache;
11208 CORE_ADDR this_addr; /* Address of the insn being decoded. */
11209 uint32_t arm_insn; /* Should accommodate thumb. */
11210 uint32_t cond; /* Condition code. */
11211 uint32_t opcode; /* Insn opcode. */
11212 uint32_t decode; /* Insn decode bits. */
11213 uint32_t mem_rec_count; /* No of mem records. */
11214 uint32_t reg_rec_count; /* No of reg records. */
11215 uint32_t *arm_regs; /* Registers to be saved for this record. */
11216 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
11217 };
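/* Illustrative sketch of how a decode routine fills this record (as done in
   arm_record_data_proc_imm below):

     record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);  // Rd
     record_buf[1] = ARM_PS_REGNUM;                        // flags
     arm_insn_r->reg_rec_count = 2;
     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
     MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count,
                record_buf_mem);  */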
11218
11219
11220 /* Checks ARM SBZ and SBO mandatory fields. */
11221
11222 static int
11223 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
11224 {
11225 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
11226
11227 if (!len)
11228 return 1;
11229
11230 if (!sbo)
11231 ones = ~ones;
11232
11233 while (ones)
11234 {
11235 if (!(ones & sbo))
11236 {
11237 return 0;
11238 }
11239 ones = ones >> 1;
11240 }
11241 return 1;
11242 }
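/* For example, the BLX decoding below calls sbo_sbz (insn, 9, 12, 1) to
   require that the twelve bits starting at 1-based bit position 9 (0-based
   bits 8 to 19) are should-be-one.  */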
11243
11244 enum arm_record_result
11245 {
11246 ARM_RECORD_SUCCESS = 0,
11247 ARM_RECORD_FAILURE = 1
11248 };
11249
11250 enum arm_record_strx_t
11251 {
11252 ARM_RECORD_STRH=1,
11253 ARM_RECORD_STRD
11254 };
11255
11256 enum record_type_t
11257 {
11258 ARM_RECORD=1,
11259 THUMB_RECORD,
11260 THUMB2_RECORD
11261 };
11262
11263
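/* Record the side effects of the miscellaneous store insns STRH and STRD:
   the memory written (two bytes for STRH, two words for STRD) goes into
   RECORD_BUF_MEM as (length, address) pairs, and for the indexed forms the
   updated base register Rn goes into RECORD_BUF.  */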
11264 static int
11265 arm_record_strx (arm_insn_decode_record *arm_insn_r, uint32_t *record_buf,
11266 uint32_t *record_buf_mem, arm_record_strx_t str_type)
11267 {
11268
11269 struct regcache *reg_cache = arm_insn_r->regcache;
11270 ULONGEST u_regval[2]= {0};
11271
11272 uint32_t reg_src1 = 0, reg_src2 = 0;
11273 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11274
11275 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11276 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11277
11278 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11279 {
11280 /* 1) Handle misc store, immediate offset. */
11281 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
11282 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
11283 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11284 regcache_raw_read_unsigned (reg_cache, reg_src1,
11285 &u_regval[0]);
11286 if (ARM_PC_REGNUM == reg_src1)
11287 {
11288 /* If R15 was used as Rn, the effective value is the current PC + 8. */
11289 u_regval[0] = u_regval[0] + 8;
11290 }
11291 offset_8 = (immed_high << 4) | immed_low;
11292 /* Calculate target store address. */
11293 if (14 == arm_insn_r->opcode)
11294 {
11295 tgt_mem_addr = u_regval[0] + offset_8;
11296 }
11297 else
11298 {
11299 tgt_mem_addr = u_regval[0] - offset_8;
11300 }
11301 if (ARM_RECORD_STRH == str_type)
11302 {
11303 record_buf_mem[0] = 2;
11304 record_buf_mem[1] = tgt_mem_addr;
11305 arm_insn_r->mem_rec_count = 1;
11306 }
11307 else if (ARM_RECORD_STRD == str_type)
11308 {
11309 record_buf_mem[0] = 4;
11310 record_buf_mem[1] = tgt_mem_addr;
11311 record_buf_mem[2] = 4;
11312 record_buf_mem[3] = tgt_mem_addr + 4;
11313 arm_insn_r->mem_rec_count = 2;
11314 }
11315 }
11316 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
11317 {
11318 /* 2) Store, register offset. */
11319 /* Get Rm. */
11320 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11321 /* Get Rn. */
11322 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11323 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11324 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11325 if (15 == reg_src2)
11326 {
11327 /* If R15 was used as Rn, the effective value is the current PC + 8. */
11328 u_regval[0] = u_regval[0] + 8;
11329 }
11330 /* Calculate target store address, Rn +/- Rm, register offset. */
11331 if (12 == arm_insn_r->opcode)
11332 {
11333 tgt_mem_addr = u_regval[0] + u_regval[1];
11334 }
11335 else
11336 {
11337 tgt_mem_addr = u_regval[1] - u_regval[0];
11338 }
11339 if (ARM_RECORD_STRH == str_type)
11340 {
11341 record_buf_mem[0] = 2;
11342 record_buf_mem[1] = tgt_mem_addr;
11343 arm_insn_r->mem_rec_count = 1;
11344 }
11345 else if (ARM_RECORD_STRD == str_type)
11346 {
11347 record_buf_mem[0] = 4;
11348 record_buf_mem[1] = tgt_mem_addr;
11349 record_buf_mem[2] = 4;
11350 record_buf_mem[3] = tgt_mem_addr + 4;
11351 arm_insn_r->mem_rec_count = 2;
11352 }
11353 }
11354 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11355 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
11356 {
11357 /* 3) Store, immediate pre-indexed. */
11358 /* 5) Store, immediate post-indexed. */
11359 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
11360 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
11361 offset_8 = (immed_high << 4) | immed_low;
11362 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11363 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11364 /* Calculate target store address, Rn +/- Rm, register offset. */
11365 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
11366 {
11367 tgt_mem_addr = u_regval[0] + offset_8;
11368 }
11369 else
11370 {
11371 tgt_mem_addr = u_regval[0] - offset_8;
11372 }
11373 if (ARM_RECORD_STRH == str_type)
11374 {
11375 record_buf_mem[0] = 2;
11376 record_buf_mem[1] = tgt_mem_addr;
11377 arm_insn_r->mem_rec_count = 1;
11378 }
11379 else if (ARM_RECORD_STRD == str_type)
11380 {
11381 record_buf_mem[0] = 4;
11382 record_buf_mem[1] = tgt_mem_addr;
11383 record_buf_mem[2] = 4;
11384 record_buf_mem[3] = tgt_mem_addr + 4;
11385 arm_insn_r->mem_rec_count = 2;
11386 }
11387 /* Record Rn also as it changes. */
11388 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
11389 arm_insn_r->reg_rec_count = 1;
11390 }
11391 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
11392 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
11393 {
11394 /* 4) Store, register pre-indexed. */
11395 /* 6) Store, register post-indexed. */
11396 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11397 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11398 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11399 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11400 /* Calculate target store address, Rn +/- Rm, register offset. */
11401 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
11402 {
11403 tgt_mem_addr = u_regval[0] + u_regval[1];
11404 }
11405 else
11406 {
11407 tgt_mem_addr = u_regval[1] - u_regval[0];
11408 }
11409 if (ARM_RECORD_STRH == str_type)
11410 {
11411 record_buf_mem[0] = 2;
11412 record_buf_mem[1] = tgt_mem_addr;
11413 arm_insn_r->mem_rec_count = 1;
11414 }
11415 else if (ARM_RECORD_STRD == str_type)
11416 {
11417 record_buf_mem[0] = 4;
11418 record_buf_mem[1] = tgt_mem_addr;
11419 record_buf_mem[2] = 4;
11420 record_buf_mem[3] = tgt_mem_addr + 4;
11421 arm_insn_r->mem_rec_count = 2;
11422 }
11423 /* Record Rn also as it changes. */
11424 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
11425 arm_insn_r->reg_rec_count = 1;
11426 }
11427 return 0;
11428 }
11429
11430 /* Handling ARM extension space insns. */
11431
11432 static int
11433 arm_record_extension_space (arm_insn_decode_record *arm_insn_r)
11434 {
11435 int ret = 0; /* Return value: -1: record failure; 0: success. */
11436 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
11437 uint32_t record_buf[8], record_buf_mem[8];
11438 uint32_t reg_src1 = 0;
11439 struct regcache *reg_cache = arm_insn_r->regcache;
11440 ULONGEST u_regval = 0;
11441
11442 gdb_assert (!INSN_RECORDED(arm_insn_r));
11443 /* Handle unconditional insn extension space. */
11444
11445 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
11446 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11447 if (arm_insn_r->cond)
11448 {
11449 /* PLD has no effect on architectural state; it just affects
11450 the caches. */
11451 if (5 == ((opcode1 & 0xE0) >> 5))
11452 {
11453 /* BLX(1) */
11454 record_buf[0] = ARM_PS_REGNUM;
11455 record_buf[1] = ARM_LR_REGNUM;
11456 arm_insn_r->reg_rec_count = 2;
11457 }
11458 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
11459 }
11460
11461
11462 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11463 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
11464 {
11465 ret = -1;
11466 /* Undefined instruction on ARM V5; need to handle if later
11467 versions define it. */
11468 }
11469
11470 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
11471 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11472 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
11473
11474 /* Handle arithmetic insn extension space. */
11475 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
11476 && !INSN_RECORDED(arm_insn_r))
11477 {
11478 /* Handle MLA(S) and MUL(S). */
11479 if (in_inclusive_range (insn_op1, 0U, 3U))
11480 {
11481 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11482 record_buf[1] = ARM_PS_REGNUM;
11483 arm_insn_r->reg_rec_count = 2;
11484 }
11485 else if (in_inclusive_range (insn_op1, 4U, 15U))
11486 {
11487 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11488 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11489 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11490 record_buf[2] = ARM_PS_REGNUM;
11491 arm_insn_r->reg_rec_count = 3;
11492 }
11493 }
11494
11495 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11496 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11497 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11498
11499 /* Handle control insn extension space. */
11500
11501 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11502 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11503 {
11504 if (!bit (arm_insn_r->arm_insn,25))
11505 {
11506 if (!bits (arm_insn_r->arm_insn, 4, 7))
11507 {
11508 if ((0 == insn_op1) || (2 == insn_op1))
11509 {
11510 /* MRS. */
11511 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11512 arm_insn_r->reg_rec_count = 1;
11513 }
11514 else if (1 == insn_op1)
11515 {
11516 /* CPSR is going to be changed. */
11517 record_buf[0] = ARM_PS_REGNUM;
11518 arm_insn_r->reg_rec_count = 1;
11519 }
11520 else if (3 == insn_op1)
11521 {
11522 /* SPSR is going to be changed. */
11523 /* We need to get SPSR value, which is yet to be done. */
11524 return -1;
11525 }
11526 }
11527 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11528 {
11529 if (1 == insn_op1)
11530 {
11531 /* BX. */
11532 record_buf[0] = ARM_PS_REGNUM;
11533 arm_insn_r->reg_rec_count = 1;
11534 }
11535 else if (3 == insn_op1)
11536 {
11537 /* CLZ. */
11538 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11539 arm_insn_r->reg_rec_count = 1;
11540 }
11541 }
11542 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11543 {
11544 /* BLX. */
11545 record_buf[0] = ARM_PS_REGNUM;
11546 record_buf[1] = ARM_LR_REGNUM;
11547 arm_insn_r->reg_rec_count = 2;
11548 }
11549 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11550 {
11551 /* QADD, QSUB, QDADD, QDSUB */
11552 record_buf[0] = ARM_PS_REGNUM;
11553 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11554 arm_insn_r->reg_rec_count = 2;
11555 }
11556 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11557 {
11558 /* BKPT. */
11559 record_buf[0] = ARM_PS_REGNUM;
11560 record_buf[1] = ARM_LR_REGNUM;
11561 arm_insn_r->reg_rec_count = 2;
11562
11563 /* Save SPSR also; how? */
11564 return -1;
11565 }
11566 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
11567 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11568 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11569 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11570 )
11571 {
11572 if (0 == insn_op1 || 1 == insn_op1)
11573 {
11574 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11575 /* We don't optimize for SMULW<y>, where we
11576 need only Rd. */
11577 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11578 record_buf[1] = ARM_PS_REGNUM;
11579 arm_insn_r->reg_rec_count = 2;
11580 }
11581 else if (2 == insn_op1)
11582 {
11583 /* SMLAL<x><y>. */
11584 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11585 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11586 arm_insn_r->reg_rec_count = 2;
11587 }
11588 else if (3 == insn_op1)
11589 {
11590 /* SMUL<x><y>. */
11591 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11592 arm_insn_r->reg_rec_count = 1;
11593 }
11594 }
11595 }
11596 else
11597 {
11598 /* MSR : immediate form. */
11599 if (1 == insn_op1)
11600 {
11601 /* CPSR is going to be changed. */
11602 record_buf[0] = ARM_PS_REGNUM;
11603 arm_insn_r->reg_rec_count = 1;
11604 }
11605 else if (3 == insn_op1)
11606 {
11607 /* SPSR is going to be changed. */
11608 /* We need to get the SPSR value, which is yet to be done. */
11609 return -1;
11610 }
11611 }
11612 }
11613
11614 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11615 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11616 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11617
11618 /* Handle load/store insn extension space. */
11619
11620 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11621 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11622 && !INSN_RECORDED(arm_insn_r))
11623 {
11624 /* SWP/SWPB. */
11625 if (0 == insn_op1)
11626 {
11627 /* These insns change both a register and memory. */
11628 /* SWP or SWPB insn. */
11629 /* Get memory address given by Rn. */
11630 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11631 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11632 /* SWP insn swaps a word. */
11633 if (8 == arm_insn_r->opcode)
11634 {
11635 record_buf_mem[0] = 4;
11636 }
11637 else
11638 {
11639 /* SWPB insn swaps only a byte. */
11640 record_buf_mem[0] = 1;
11641 }
11642 record_buf_mem[1] = u_regval;
11643 arm_insn_r->mem_rec_count = 1;
11644 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11645 arm_insn_r->reg_rec_count = 1;
11646 }
11647 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11648 {
11649 /* STRH. */
11650 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
11651 ARM_RECORD_STRH);
11652 }
11653 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11654 {
11655 /* LDRD. */
11656 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11657 record_buf[1] = record_buf[0] + 1;
11658 arm_insn_r->reg_rec_count = 2;
11659 }
11660 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11661 {
11662 /* STRD. */
11663 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
11664 ARM_RECORD_STRD);
11665 }
11666 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11667 {
11668 /* LDRH, LDRSB, LDRSH. */
11669 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11670 arm_insn_r->reg_rec_count = 1;
11671 }
11672
11673 }
11674
11675 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11676 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11677 && !INSN_RECORDED(arm_insn_r))
11678 {
11679 ret = -1;
11680 /* Handle coprocessor insn extension space. */
11681 }
11682
11683 /* To be done for ARMv5 and later; as of now we return -1. */
11684 if (-1 == ret)
11685 return ret;
11686
11687 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11688 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11689
11690 return ret;
11691 }
11692
11693 /* Handling opcode 000 insns. */
11694
11695 static int
11696 arm_record_data_proc_misc_ld_str (arm_insn_decode_record *arm_insn_r)
11697 {
11698 struct regcache *reg_cache = arm_insn_r->regcache;
11699 uint32_t record_buf[8], record_buf_mem[8];
11700 ULONGEST u_regval[2] = {0};
11701
11702 uint32_t reg_src1 = 0;
11703 uint32_t opcode1 = 0;
11704
11705 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11706 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11707 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11708
11709 if (!((opcode1 & 0x19) == 0x10))
11710 {
11711 /* Data-processing (register) and data-processing (register-shifted
11712 register). */
11713 /* In all 11 shifter operand modes, these insns modify the destination
11714 register, which is specified by bits 12-15. */
11715 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11716 record_buf[1] = ARM_PS_REGNUM;
11717 arm_insn_r->reg_rec_count = 2;
11718 }
11719 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
11720 {
11721 /* Miscellaneous instructions */
11722
11723 if (3 == arm_insn_r->decode && 0x12 == opcode1
11724 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11725 {
11726 /* Handle BLX, branch and link/exchange. */
11727 if (9 == arm_insn_r->opcode)
11728 {
11729 /* Branching is selected by setting the T bit of the CPSR from bit[0] of Rm,
11730 and R14 stores the return address. */
11731 record_buf[0] = ARM_PS_REGNUM;
11732 record_buf[1] = ARM_LR_REGNUM;
11733 arm_insn_r->reg_rec_count = 2;
11734 }
11735 }
11736 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11737 {
11738 /* Handle enhanced software breakpoint insn, BKPT. */
11739 /* The CPSR is changed so that execution continues in ARM state, with
11740 normal interrupts disabled, entering Abort mode. */
11741 /* The PC is set according to the high vector configuration. */
11742 /* If the user hits the breakpoint and then reverse-executes, we
11743 need to go back to the previous CPSR and
11744 program counter. */
11745 record_buf[0] = ARM_PS_REGNUM;
11746 record_buf[1] = ARM_LR_REGNUM;
11747 arm_insn_r->reg_rec_count = 2;
11748
11749 /* Save SPSR also; how? */
11750 return -1;
11751 }
11752 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11753 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11754 {
11755 /* Handle BX, branch and exchange. */
11756 /* Branching is selected by setting the T bit of the CPSR from bit[0] of Rm. */
11757 record_buf[0] = ARM_PS_REGNUM;
11758 arm_insn_r->reg_rec_count = 1;
11759 }
11760 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11761 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11762 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11763 {
11764 /* Count leading zeros: CLZ. */
11765 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11766 arm_insn_r->reg_rec_count = 1;
11767 }
11768 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11769 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11770 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11771 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
11772 {
11773 /* Handle MRS insn. */
11774 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11775 arm_insn_r->reg_rec_count = 1;
11776 }
11777 }
11778 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
11779 {
11780 /* Multiply and multiply-accumulate */
11781
11782 /* Handle multiply instructions. */
11783 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11784 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11785 {
11786 /* Handle MLA and MUL. */
11787 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11788 record_buf[1] = ARM_PS_REGNUM;
11789 arm_insn_r->reg_rec_count = 2;
11790 }
11791 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11792 {
11793 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11794 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11795 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11796 record_buf[2] = ARM_PS_REGNUM;
11797 arm_insn_r->reg_rec_count = 3;
11798 }
11799 }
11800 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
11801 {
11802 /* Synchronization primitives */
11803
11804 /* Handling SWP, SWPB. */
11805 /* These insns change both a register and memory. */
11806 /* SWP or SWPB insn. */
11807
11808 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11809 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11810 /* SWP insn swaps a word. */
11811 if (8 == arm_insn_r->opcode)
11812 {
11813 record_buf_mem[0] = 4;
11814 }
11815 else
11816 {
11817 /* SWPB insn swaps only a byte. */
11818 record_buf_mem[0] = 1;
11819 }
11820 record_buf_mem[1] = u_regval[0];
11821 arm_insn_r->mem_rec_count = 1;
11822 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11823 arm_insn_r->reg_rec_count = 1;
11824 }
11825 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
11826 || 15 == arm_insn_r->decode)
11827 {
11828 if ((opcode1 & 0x12) == 2)
11829 {
11830 /* Extra load/store (unprivileged) */
11831 return -1;
11832 }
11833 else
11834 {
11835 /* Extra load/store */
11836 switch (bits (arm_insn_r->arm_insn, 5, 6))
11837 {
11838 case 1:
11839 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
11840 {
11841 /* STRH (register), STRH (immediate) */
11842 arm_record_strx (arm_insn_r, &record_buf[0],
11843 &record_buf_mem[0], ARM_RECORD_STRH);
11844 }
11845 else if ((opcode1 & 0x05) == 0x1)
11846 {
11847 /* LDRH (register) */
11848 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11849 arm_insn_r->reg_rec_count = 1;
11850
11851 if (bit (arm_insn_r->arm_insn, 21))
11852 {
11853 /* Write back to Rn. */
11854 record_buf[arm_insn_r->reg_rec_count++]
11855 = bits (arm_insn_r->arm_insn, 16, 19);
11856 }
11857 }
11858 else if ((opcode1 & 0x05) == 0x5)
11859 {
11860 /* LDRH (immediate), LDRH (literal) */
11861 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11862
11863 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11864 arm_insn_r->reg_rec_count = 1;
11865
11866 if (rn != 15)
11867 {
11868 /* LDRH (immediate) */
11869 if (bit (arm_insn_r->arm_insn, 21))
11870 {
11871 /* Write back to Rn. */
11872 record_buf[arm_insn_r->reg_rec_count++] = rn;
11873 }
11874 }
11875 }
11876 else
11877 return -1;
11878 break;
11879 case 2:
11880 if ((opcode1 & 0x05) == 0x0)
11881 {
11882 /* LDRD (register) */
11883 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11884 record_buf[1] = record_buf[0] + 1;
11885 arm_insn_r->reg_rec_count = 2;
11886
11887 if (bit (arm_insn_r->arm_insn, 21))
11888 {
11889 /* Write back to Rn. */
11890 record_buf[arm_insn_r->reg_rec_count++]
11891 = bits (arm_insn_r->arm_insn, 16, 19);
11892 }
11893 }
11894 else if ((opcode1 & 0x05) == 0x1)
11895 {
11896 /* LDRSB (register) */
11897 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11898 arm_insn_r->reg_rec_count = 1;
11899
11900 if (bit (arm_insn_r->arm_insn, 21))
11901 {
11902 /* Write back to Rn. */
11903 record_buf[arm_insn_r->reg_rec_count++]
11904 = bits (arm_insn_r->arm_insn, 16, 19);
11905 }
11906 }
11907 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
11908 {
11909 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
11910 LDRSB (literal) */
11911 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11912
11913 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11914 arm_insn_r->reg_rec_count = 1;
11915
11916 if (rn != 15)
11917 {
11918 /* LDRD (immediate), LDRSB (immediate) */
11919 if (bit (arm_insn_r->arm_insn, 21))
11920 {
11921 /* Write back to Rn. */
11922 record_buf[arm_insn_r->reg_rec_count++] = rn;
11923 }
11924 }
11925 }
11926 else
11927 return -1;
11928 break;
11929 case 3:
11930 if ((opcode1 & 0x05) == 0x0)
11931 {
11932 /* STRD (register) */
11933 arm_record_strx (arm_insn_r, &record_buf[0],
11934 &record_buf_mem[0], ARM_RECORD_STRD);
11935 }
11936 else if ((opcode1 & 0x05) == 0x1)
11937 {
11938 /* LDRSH (register) */
11939 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11940 arm_insn_r->reg_rec_count = 1;
11941
11942 if (bit (arm_insn_r->arm_insn, 21))
11943 {
11944 /* Write back to Rn. */
11945 record_buf[arm_insn_r->reg_rec_count++]
11946 = bits (arm_insn_r->arm_insn, 16, 19);
11947 }
11948 }
11949 else if ((opcode1 & 0x05) == 0x4)
11950 {
11951 /* STRD (immediate) */
11952 arm_record_strx (arm_insn_r, &record_buf[0],
11953 &record_buf_mem[0], ARM_RECORD_STRD);
11954 }
11955 else if ((opcode1 & 0x05) == 0x5)
11956 {
11957 /* LDRSH (immediate), LDRSH (literal) */
11958 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11959 arm_insn_r->reg_rec_count = 1;
11960
11961 if (bit (arm_insn_r->arm_insn, 21))
11962 {
11963 /* Write back to Rn. */
11964 record_buf[arm_insn_r->reg_rec_count++]
11965 = bits (arm_insn_r->arm_insn, 16, 19);
11966 }
11967 }
11968 else
11969 return -1;
11970 break;
11971 default:
11972 return -1;
11973 }
11974 }
11975 }
11976 else
11977 {
11978 return -1;
11979 }
11980
11981 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11982 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11983 return 0;
11984 }
11985
11986 /* Handling opcode 001 insns. */
11987
11988 static int
11989 arm_record_data_proc_imm (arm_insn_decode_record *arm_insn_r)
11990 {
11991 uint32_t record_buf[8], record_buf_mem[8];
11992
11993 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11994 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11995
11996 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11997 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11998 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11999 )
12000 {
12001 /* Handle MSR insn. */
12002 if (9 == arm_insn_r->opcode)
12003 {
12004 /* CPSR is going to be changed. */
12005 record_buf[0] = ARM_PS_REGNUM;
12006 arm_insn_r->reg_rec_count = 1;
12007 }
12008 else
12009 {
12010 /* SPSR is going to be changed. */
12011 }
12012 }
12013 else if (arm_insn_r->opcode <= 15)
12014 {
12015 /* Normal data processing insns. */
12016 /* In all 11 shifter operand modes, these insns modify the destination
12017 register, which is specified by bits 12-15. */
12018 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12019 record_buf[1] = ARM_PS_REGNUM;
12020 arm_insn_r->reg_rec_count = 2;
12021 }
12022 else
12023 {
12024 return -1;
12025 }
12026
12027 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12028 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12029 return 0;
12030 }
12031
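/* Handle ARM media instructions (reached from the opcode 011 handler below
   when bit 4 of the insn is set).  Records the destination registers the
   insn may modify.  */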
12032 static int
12033 arm_record_media (arm_insn_decode_record *arm_insn_r)
12034 {
12035 uint32_t record_buf[8];
12036
12037 switch (bits (arm_insn_r->arm_insn, 22, 24))
12038 {
12039 case 0:
12040 /* Parallel addition and subtraction, signed */
12041 case 1:
12042 /* Parallel addition and subtraction, unsigned */
12043 case 2:
12044 case 3:
12045 /* Packing, unpacking, saturation and reversal */
12046 {
12047 int rd = bits (arm_insn_r->arm_insn, 12, 15);
12048
12049 record_buf[arm_insn_r->reg_rec_count++] = rd;
12050 }
12051 break;
12052
12053 case 4:
12054 case 5:
12055 /* Signed multiplies */
12056 {
12057 int rd = bits (arm_insn_r->arm_insn, 16, 19);
12058 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
12059
12060 record_buf[arm_insn_r->reg_rec_count++] = rd;
12061 if (op1 == 0x0)
12062 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12063 else if (op1 == 0x4)
12064 record_buf[arm_insn_r->reg_rec_count++]
12065 = bits (arm_insn_r->arm_insn, 12, 15);
12066 }
12067 break;
12068
12069 case 6:
12070 {
12071 if (bit (arm_insn_r->arm_insn, 21)
12072 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
12073 {
12074 /* SBFX */
12075 record_buf[arm_insn_r->reg_rec_count++]
12076 = bits (arm_insn_r->arm_insn, 12, 15);
12077 }
12078 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
12079 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
12080 {
12081 /* USAD8 and USADA8 */
12082 record_buf[arm_insn_r->reg_rec_count++]
12083 = bits (arm_insn_r->arm_insn, 16, 19);
12084 }
12085 }
12086 break;
12087
12088 case 7:
12089 {
12090 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
12091 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
12092 {
12093 /* Permanently UNDEFINED */
12094 return -1;
12095 }
12096 else
12097 {
12098 /* BFC, BFI and UBFX */
12099 record_buf[arm_insn_r->reg_rec_count++]
12100 = bits (arm_insn_r->arm_insn, 12, 15);
12101 }
12102 }
12103 break;
12104
12105 default:
12106 return -1;
12107 }
12108
12109 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12110
12111 return 0;
12112 }
12113
12114 /* Handle ARM mode instructions with opcode 010. */
12115
12116 static int
12117 arm_record_ld_st_imm_offset (arm_insn_decode_record *arm_insn_r)
12118 {
12119 struct regcache *reg_cache = arm_insn_r->regcache;
12120
12121 uint32_t reg_base, reg_dest;
12122 uint32_t offset_12, tgt_mem_addr;
12123 uint32_t record_buf[8], record_buf_mem[8];
12124 unsigned char wback;
12125 ULONGEST u_regval;
12126
12127 /* Calculate wback. */
12128 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
12129 || (bit (arm_insn_r->arm_insn, 21) == 1);
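  /* wback is set for post-indexed addressing (P, bit 24, clear) or when
     writeback is requested (W, bit 21, set); in both cases Rn is updated.  */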
12130
12131 arm_insn_r->reg_rec_count = 0;
12132 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
12133
12134 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12135 {
12136 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
12137 and LDRT. */
12138
12139 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
12140 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
12141
12142 /* The LDR instruction is capable of doing branching. If MOV LR, PC
12143 precedes a LDR instruction having R15 as reg_base, it
12144 emulates a branch and link instruction, and hence we need to save
12145 CPSR and PC as well. */
12146 if (ARM_PC_REGNUM == reg_dest)
12147 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12148
12149 /* If wback is true, also save the base register, which is going to be
12150 written to. */
12151 if (wback)
12152 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12153 }
12154 else
12155 {
12156 /* STR (immediate), STRB (immediate), STRBT and STRT. */
12157
12158 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
12159 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
12160
12161 /* Handle bit U. */
12162 if (bit (arm_insn_r->arm_insn, 23))
12163 {
12164 /* U == 1: Add the offset. */
12165 tgt_mem_addr = (uint32_t) u_regval + offset_12;
12166 }
12167 else
12168 {
12169 /* U == 0: subtract the offset. */
12170 tgt_mem_addr = (uint32_t) u_regval - offset_12;
12171 }
12172
12173 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
12174 bytes. */
12175 if (bit (arm_insn_r->arm_insn, 22))
12176 {
12177 /* STRB and STRBT: 1 byte. */
12178 record_buf_mem[0] = 1;
12179 }
12180 else
12181 {
12182 /* STR and STRT: 4 bytes. */
12183 record_buf_mem[0] = 4;
12184 }
12185
12186 /* Handle bit P. */
12187 if (bit (arm_insn_r->arm_insn, 24))
12188 record_buf_mem[1] = tgt_mem_addr;
12189 else
12190 record_buf_mem[1] = (uint32_t) u_regval;
12191
12192 arm_insn_r->mem_rec_count = 1;
12193
12194 /* If wback is true, also save the base register, which is going to be
12195 written to. */
12196 if (wback)
12197 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12198 }
12199
12200 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12201 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12202 return 0;
12203 }
12204
12205 /* Handling opcode 011 insns. */
12206
12207 static int
12208 arm_record_ld_st_reg_offset (arm_insn_decode_record *arm_insn_r)
12209 {
12210 struct regcache *reg_cache = arm_insn_r->regcache;
12211
12212 uint32_t shift_imm = 0;
12213 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
12214 uint32_t offset_12 = 0, tgt_mem_addr = 0;
12215 uint32_t record_buf[8], record_buf_mem[8];
12216
12217 LONGEST s_word;
12218 ULONGEST u_regval[2];
12219
12220 if (bit (arm_insn_r->arm_insn, 4))
12221 return arm_record_media (arm_insn_r);
12222
12223 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
12224 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
12225
12226 /* Handle enhanced store insns and LDRD DSP insn,
12227 order begins according to addressing modes for store insns
12228 STRH insn. */
12229
12230 /* LDR or STR? */
12231 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12232 {
12233 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
12234 /* An LDR insn is capable of branching: if it is preceded by
12235 MOV LR, PC and loads into R15,
12236 it emulates a branch and link insn, and hence we
12237 need to save the CPSR and PC as well. */
12238 if (15 != reg_dest)
12239 {
12240 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12241 arm_insn_r->reg_rec_count = 1;
12242 }
12243 else
12244 {
12245 record_buf[0] = reg_dest;
12246 record_buf[1] = ARM_PS_REGNUM;
12247 arm_insn_r->reg_rec_count = 2;
12248 }
12249 }
12250 else
12251 {
12252 if (! bits (arm_insn_r->arm_insn, 4, 11))
12253 {
12254 /* Store insn, register offset and register pre-indexed,
12255 register post-indexed. */
12256 /* Get Rm. */
12257 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
12258 /* Get Rn. */
12259 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
12260 regcache_raw_read_unsigned (reg_cache, reg_src1
12261 , &u_regval[0]);
12262 regcache_raw_read_unsigned (reg_cache, reg_src2
12263 , &u_regval[1]);
12264 if (15 == reg_src2)
12265 {
12266 /* If R15 was used as Rn, the effective value is the current PC + 8. */
12267 /* Pre-indexed mode doesn't reach here; illegal insn. */
12268 u_regval[0] = u_regval[0] + 8;
12269 }
12270 /* Calculate target store address, Rn +/- Rm, register offset. */
12271 /* U == 1. */
12272 if (bit (arm_insn_r->arm_insn, 23))
12273 {
12274 tgt_mem_addr = u_regval[0] + u_regval[1];
12275 }
12276 else
12277 {
12278 tgt_mem_addr = u_regval[1] - u_regval[0];
12279 }
12280
12281 switch (arm_insn_r->opcode)
12282 {
12283 /* STR. */
12284 case 8:
12285 case 12:
12286 /* STR. */
12287 case 9:
12288 case 13:
12289 /* STRT. */
12290 case 1:
12291 case 5:
12292 /* STR. */
12293 case 0:
12294 case 4:
12295 record_buf_mem[0] = 4;
12296 break;
12297
12298 /* STRB. */
12299 case 10:
12300 case 14:
12301 /* STRB. */
12302 case 11:
12303 case 15:
12304 /* STRBT. */
12305 case 3:
12306 case 7:
12307 /* STRB. */
12308 case 2:
12309 case 6:
12310 record_buf_mem[0] = 1;
12311 break;
12312
12313 default:
12314 gdb_assert_not_reached ("no decoding pattern found");
12315 break;
12316 }
12317 record_buf_mem[1] = tgt_mem_addr;
12318 arm_insn_r->mem_rec_count = 1;
12319
12320 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
12321 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
12322 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
12323 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
12324 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
12325 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
12326 )
12327 {
12328 /* Rn is going to be changed in pre-indexed mode and
12329 post-indexed mode as well. */
12330 record_buf[0] = reg_src2;
12331 arm_insn_r->reg_rec_count = 1;
12332 }
12333 }
12334 else
12335 {
12336 /* Store insn, scaled register offset; scaled pre-indexed. */
12337 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
12338 /* Get Rm. */
12339 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
12340 /* Get Rn. */
12341 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
12342 /* Get shift_imm. */
12343 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
12344 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12345 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
12346 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12347 /* offset_12 currently holds the shift type (bits 5-6). */
12348 switch (offset_12)
12349 {
12350 case 0:
12351 /* Offset_12 used as index. */
12352 offset_12 = u_regval[0] << shift_imm;
12353 break;
12354
12355 case 1:
12356 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
12357 break;
12358
12359 case 2:
12360 if (!shift_imm)
12361 {
12362 if (bit (u_regval[0], 31))
12363 {
12364 offset_12 = 0xFFFFFFFF;
12365 }
12366 else
12367 {
12368 offset_12 = 0;
12369 }
12370 }
12371 else
12372 {
12373 /* This is arithmetic shift. */
12374 offset_12 = s_word >> shift_imm;
12375 }
12376 break;
12377
12378 case 3:
12379 if (!shift_imm)
12380 {
12381 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
12382 &u_regval[1]);
12383 /* Get C flag value and shift it by 31. */
12384 offset_12 = (((bit (u_regval[1], 29)) << 31) \
12385 | (u_regval[0]) >> 1);
12386 }
12387 else
12388 {
12389 /* ROR: rotate right by shift_imm within 32 bits. */
12390 offset_12 = ((u_regval[0] >> shift_imm)
12391 | (u_regval[0] << (32 - shift_imm)));
12392 }
12393 break;
12394
12395 default:
12396 gdb_assert_not_reached ("no decoding pattern found");
12397 break;
12398 }
12399
12400 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12401 /* bit U set. */
12402 if (bit (arm_insn_r->arm_insn, 23))
12403 {
12404 tgt_mem_addr = u_regval[1] + offset_12;
12405 }
12406 else
12407 {
12408 tgt_mem_addr = u_regval[1] - offset_12;
12409 }
12410
12411 switch (arm_insn_r->opcode)
12412 {
12413 /* STR. */
12414 case 8:
12415 case 12:
12416 /* STR. */
12417 case 9:
12418 case 13:
12419 /* STRT. */
12420 case 1:
12421 case 5:
12422 /* STR. */
12423 case 0:
12424 case 4:
12425 record_buf_mem[0] = 4;
12426 break;
12427
12428 /* STRB. */
12429 case 10:
12430 case 14:
12431 /* STRB. */
12432 case 11:
12433 case 15:
12434 /* STRBT. */
12435 case 3:
12436 case 7:
12437 /* STRB. */
12438 case 2:
12439 case 6:
12440 record_buf_mem[0] = 1;
12441 break;
12442
12443 default:
12444 gdb_assert_not_reached ("no decoding pattern found");
12445 break;
12446 }
12447 record_buf_mem[1] = tgt_mem_addr;
12448 arm_insn_r->mem_rec_count = 1;
12449
12450 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
12451 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
12452 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
12453 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
12454 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
12455 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
12456 )
12457 {
12458 /* Rn is going to be changed in register scaled pre-indexed
12459 mode and in scaled post-indexed mode. */
12460 record_buf[0] = reg_src2;
12461 arm_insn_r->reg_rec_count = 1;
12462 }
12463 }
12464 }
12465
12466 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12467 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12468 return 0;
12469 }
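
/* The scaled-register store decoding above derives the offset from Rm and
   the shift type in bits 5-6.  A minimal standalone sketch of that offset
   computation follows; it is illustrative only and is not called by the
   recorder.  It assumes 32-bit values and takes the carry flag from the
   caller.  */

static uint32_t
arm_record_example_shifted_offset (uint32_t rm_val, uint32_t shift_type,
				   uint32_t shift_imm, uint32_t c_flag)
{
  switch (shift_type)
    {
    case 0:		/* LSL.  */
      return rm_val << shift_imm;
    case 1:		/* LSR; shift_imm == 0 encodes a shift of 32.  */
      return shift_imm ? rm_val >> shift_imm : 0;
    case 2:		/* ASR; shift_imm == 0 encodes a shift of 32.  */
      if (shift_imm == 0)
	return (rm_val & 0x80000000u) ? 0xffffffffu : 0;
      return (uint32_t) ((int32_t) rm_val >> shift_imm);
    default:		/* ROR; shift_imm == 0 encodes RRX.  */
      if (shift_imm == 0)
	return (c_flag << 31) | (rm_val >> 1);
      return (rm_val >> shift_imm) | (rm_val << (32 - shift_imm));
    }
}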
12470
12471 /* Handle ARM mode instructions with opcode 100. */
12472
12473 static int
12474 arm_record_ld_st_multiple (arm_insn_decode_record *arm_insn_r)
12475 {
12476 struct regcache *reg_cache = arm_insn_r->regcache;
12477 uint32_t register_count = 0, register_bits;
12478 uint32_t reg_base, addr_mode;
12479 uint32_t record_buf[24], record_buf_mem[48];
12480 uint32_t wback;
12481 ULONGEST u_regval;
12482
12483 /* Fetch the list of registers. */
12484 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
12485 arm_insn_r->reg_rec_count = 0;
12486
12487 /* Fetch the base register that contains the address we are loading from
12488 or storing to. */
12489 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
12490
12491 /* Calculate wback. */
12492 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
12493
12494 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12495 {
12496 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
12497
12498 /* Find out which registers are going to be loaded from memory. */
12499 while (register_bits)
12500 {
12501 if (register_bits & 0x00000001)
12502 record_buf[arm_insn_r->reg_rec_count++] = register_count;
12503 register_bits = register_bits >> 1;
12504 register_count++;
12505 }
12506
12507
12508 /* If wback is true, also save the base register, which is going to be
12509 written to. */
12510 if (wback)
12511 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12512
12513 /* Save the CPSR register. */
12514 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12515 }
12516 else
12517 {
12518 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
12519
12520 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
12521
12522 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
12523
12524 /* Find out how many registers are going to be stored to memory. */
12525 while (register_bits)
12526 {
12527 if (register_bits & 0x00000001)
12528 register_count++;
12529 register_bits = register_bits >> 1;
12530 }
12531
12532 switch (addr_mode)
12533 {
12534 /* STMDA (STMED): Decrement after. */
12535 case 0:
12536 record_buf_mem[1] = (uint32_t) u_regval
12537 - register_count * ARM_INT_REGISTER_SIZE + 4;
12538 break;
12539 /* STM (STMIA, STMEA): Increment after. */
12540 case 1:
12541 record_buf_mem[1] = (uint32_t) u_regval;
12542 break;
12543 /* STMDB (STMFD): Decrement before. */
12544 case 2:
12545 record_buf_mem[1] = (uint32_t) u_regval
12546 - register_count * ARM_INT_REGISTER_SIZE;
12547 break;
12548 /* STMIB (STMFA): Increment before. */
12549 case 3:
12550 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
12551 break;
12552 default:
12553 gdb_assert_not_reached ("no decoding pattern found");
12554 break;
12555 }
12556
12557 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
12558 arm_insn_r->mem_rec_count = 1;
12559
12560 /* If wback is true, also save the base register, which is going to be
12561 written to. */
12562 if (wback)
12563 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12564 }
12565
12566 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12567 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12568 return 0;
12569 }
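
/* The STM address-mode switch above records the lowest address written for
   each of the four addressing modes.  A minimal standalone sketch of that
   calculation, assuming 4-byte registers, is given below; it is not part of
   the recorder itself.  */

static uint32_t
arm_record_example_stm_start_address (uint32_t rn_val, uint32_t addr_mode,
				      uint32_t register_count)
{
  switch (addr_mode)
    {
    case 0:		/* STMDA: decrement after.  */
      return rn_val - register_count * 4 + 4;
    case 1:		/* STMIA: increment after.  */
      return rn_val;
    case 2:		/* STMDB: decrement before.  */
      return rn_val - register_count * 4;
    default:		/* STMIB: increment before.  */
      return rn_val + 4;
    }
}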
12570
12571 /* Handling opcode 101 insns. */
12572
12573 static int
12574 arm_record_b_bl (arm_insn_decode_record *arm_insn_r)
12575 {
12576 uint32_t record_buf[8];
12577
12578 /* Handle B, BL, BLX(1) insns. */
12579 /* B simply branches so we do nothing here. */
12580 /* Note: BLX(1) doesn't reach here; it falls into the
12581 extension space instead. */
12582 if (bit (arm_insn_r->arm_insn, 24))
12583 {
12584 record_buf[0] = ARM_LR_REGNUM;
12585 arm_insn_r->reg_rec_count = 1;
12586 }
12587
12588 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12589
12590 return 0;
12591 }
12592
12593 static int
12594 arm_record_unsupported_insn (arm_insn_decode_record *arm_insn_r)
12595 {
12596 gdb_printf (gdb_stderr,
12597 _("Process record does not support instruction "
12598 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
12599 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
12600
12601 return -1;
12602 }
12603
12604 /* Record handler for vector data transfer instructions. */
12605
12606 static int
12607 arm_record_vdata_transfer_insn (arm_insn_decode_record *arm_insn_r)
12608 {
12609 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
12610 uint32_t record_buf[4];
12611
12612 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
12613 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
12614 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
12615 bit_l = bit (arm_insn_r->arm_insn, 20);
12616 bit_c = bit (arm_insn_r->arm_insn, 8);
12617
12618 /* Handle VMOV instruction. */
12619 if (bit_l && bit_c)
12620 {
12621 record_buf[0] = reg_t;
12622 arm_insn_r->reg_rec_count = 1;
12623 }
12624 else if (bit_l && !bit_c)
12625 {
12626 /* Handle VMOV instruction. */
12627 if (bits_a == 0x00)
12628 {
12629 record_buf[0] = reg_t;
12630 arm_insn_r->reg_rec_count = 1;
12631 }
12632 /* Handle VMRS instruction. */
12633 else if (bits_a == 0x07)
12634 {
12635 if (reg_t == 15)
12636 reg_t = ARM_PS_REGNUM;
12637
12638 record_buf[0] = reg_t;
12639 arm_insn_r->reg_rec_count = 1;
12640 }
12641 }
12642 else if (!bit_l && !bit_c)
12643 {
12644 /* Handle VMOV instruction. */
12645 if (bits_a == 0x00)
12646 {
12647 record_buf[0] = ARM_D0_REGNUM + reg_v;
12648
12649 arm_insn_r->reg_rec_count = 1;
12650 }
12651 /* Handle VMSR instruction. */
12652 else if (bits_a == 0x07)
12653 {
12654 record_buf[0] = ARM_FPSCR_REGNUM;
12655 arm_insn_r->reg_rec_count = 1;
12656 }
12657 }
12658 else if (!bit_l && bit_c)
12659 {
12660 /* Handle VMOV instruction. */
12661 if (!(bits_a & 0x04))
12662 {
12663 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
12664 + ARM_D0_REGNUM;
12665 arm_insn_r->reg_rec_count = 1;
12666 }
12667 /* Handle VDUP instruction. */
12668 else
12669 {
12670 if (bit (arm_insn_r->arm_insn, 21))
12671 {
12672 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12673 record_buf[0] = reg_v + ARM_D0_REGNUM;
12674 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
12675 arm_insn_r->reg_rec_count = 2;
12676 }
12677 else
12678 {
12679 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12680 record_buf[0] = reg_v + ARM_D0_REGNUM;
12681 arm_insn_r->reg_rec_count = 1;
12682 }
12683 }
12684 }
12685
12686 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12687 return 0;
12688 }
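
/* The classification above keys off bit L (bit 20), bit C (bit 8) and the
   A field (bits 21-23).  The sketch below summarises, for illustration only,
   which kind of register such a transfer writes; it mirrors the branches
   above but is not used by the recorder.  Returns 0 for an ARM core
   register, 1 for FPSCR, 2 for a D register.  */

static int
arm_record_example_vdata_dest_kind (unsigned bit_l, unsigned bit_c,
				    unsigned bits_a)
{
  if (bit_l)
    return 0;		/* VMOV to a core register, or VMRS.  */
  if (!bit_c)
    return bits_a == 0x07 ? 1 : 2;	/* VMSR writes FPSCR, VMOV writes Dn.  */
  return 2;		/* VMOV (scalar) and VDUP write D registers.  */
}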
12689
12690 /* Record handler for extension register load/store instructions. */
12691
12692 static int
12693 arm_record_exreg_ld_st_insn (arm_insn_decode_record *arm_insn_r)
12694 {
12695 uint32_t opcode, single_reg;
12696 uint8_t op_vldm_vstm;
12697 uint32_t record_buf[8], record_buf_mem[128];
12698 ULONGEST u_regval = 0;
12699
12700 struct regcache *reg_cache = arm_insn_r->regcache;
12701
12702 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12703 single_reg = !bit (arm_insn_r->arm_insn, 8);
12704 op_vldm_vstm = opcode & 0x1b;
12705
12706 /* Handle VMOV instructions. */
12707 if ((opcode & 0x1e) == 0x04)
12708 {
12709 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
12710 {
12711 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12712 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12713 arm_insn_r->reg_rec_count = 2;
12714 }
12715 else
12716 {
12717 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
12718 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
12719
12720 if (single_reg)
12721 {
12722 /* The first S register number m is REG_M:M (M is bit 5),
12723 the corresponding D register number is REG_M:M / 2, which
12724 is REG_M. */
12725 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
12726 /* The second S register number is REG_M:M + 1, the
12727 corresponding D register number is (REG_M:M + 1) / 2.
12728 IOW, if bit M is 1, the first and second S registers
12729 are mapped to different D registers, otherwise, they are
12730 in the same D register. */
12731 if (bit_m)
12732 {
12733 record_buf[arm_insn_r->reg_rec_count++]
12734 = ARM_D0_REGNUM + reg_m + 1;
12735 }
12736 }
12737 else
12738 {
12739 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
12740 arm_insn_r->reg_rec_count = 1;
12741 }
12742 }
12743 }
12744 /* Handle VSTM and VPUSH instructions. */
12745 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12746 || op_vldm_vstm == 0x12)
12747 {
12748 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12749 uint32_t memory_index = 0;
12750
12751 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12752 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12753 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12754 imm_off32 = imm_off8 << 2;
12755 memory_count = imm_off8;
12756
12757 if (bit (arm_insn_r->arm_insn, 23))
12758 start_address = u_regval;
12759 else
12760 start_address = u_regval - imm_off32;
12761
12762 if (bit (arm_insn_r->arm_insn, 21))
12763 {
12764 record_buf[0] = reg_rn;
12765 arm_insn_r->reg_rec_count = 1;
12766 }
12767
12768 while (memory_count > 0)
12769 {
12770 if (single_reg)
12771 {
12772 record_buf_mem[memory_index] = 4;
12773 record_buf_mem[memory_index + 1] = start_address;
12774 start_address = start_address + 4;
12775 memory_index = memory_index + 2;
12776 }
12777 else
12778 {
12779 record_buf_mem[memory_index] = 4;
12780 record_buf_mem[memory_index + 1] = start_address;
12781 record_buf_mem[memory_index + 2] = 4;
12782 record_buf_mem[memory_index + 3] = start_address + 4;
12783 start_address = start_address + 8;
12784 memory_index = memory_index + 4;
12785 }
12786 memory_count--;
12787 }
12788 arm_insn_r->mem_rec_count = (memory_index >> 1);
12789 }
12790 /* Handle VLDM instructions. */
12791 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12792 || op_vldm_vstm == 0x13)
12793 {
12794 uint32_t reg_count, reg_vd;
12795 uint32_t reg_index = 0;
12796 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
12797
12798 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12799 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12800
12801 /* REG_VD is the first D register number. If the instruction
12802 loads memory to S registers (SINGLE_REG is TRUE), the register
12803 number is (REG_VD << 1 | bit D), so the corresponding D
12804 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
12805 if (!single_reg)
12806 reg_vd = reg_vd | (bit_d << 4);
12807
12808 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
12809 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12810
12811 /* If the instruction loads memory to D register, REG_COUNT should
12812 be divided by 2, according to the ARM Architecture Reference
12813 Manual. If the instruction loads memory to S register, divide by
12814 2 as well because two S registers are mapped to one D register. */
12815 reg_count = reg_count / 2;
12816 if (single_reg && bit_d)
12817 {
12818 /* Increase the register count if S register list starts from
12819 an odd number (bit d is one). */
12820 reg_count++;
12821 }
12822
12823 while (reg_count > 0)
12824 {
12825 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12826 reg_count--;
12827 }
12828 arm_insn_r->reg_rec_count = reg_index;
12829 }
12830 /* VSTR Vector store register. */
12831 else if ((opcode & 0x13) == 0x10)
12832 {
12833 uint32_t start_address, reg_rn, imm_off32, imm_off8;
12834 uint32_t memory_index = 0;
12835
12836 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12837 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12838 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12839 imm_off32 = imm_off8 << 2;
12840
12841 if (bit (arm_insn_r->arm_insn, 23))
12842 start_address = u_regval + imm_off32;
12843 else
12844 start_address = u_regval - imm_off32;
12845
12846 if (single_reg)
12847 {
12848 record_buf_mem[memory_index] = 4;
12849 record_buf_mem[memory_index + 1] = start_address;
12850 arm_insn_r->mem_rec_count = 1;
12851 }
12852 else
12853 {
12854 record_buf_mem[memory_index] = 4;
12855 record_buf_mem[memory_index + 1] = start_address;
12856 record_buf_mem[memory_index + 2] = 4;
12857 record_buf_mem[memory_index + 3] = start_address + 4;
12858 arm_insn_r->mem_rec_count = 2;
12859 }
12860 }
12861 /* VLDR Vector load register. */
12862 else if ((opcode & 0x13) == 0x11)
12863 {
12864 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12865
12866 if (!single_reg)
12867 {
12868 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12869 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12870 }
12871 else
12872 {
12873 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12874 /* Record register D rather than pseudo register S. */
12875 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
12876 }
12877 arm_insn_r->reg_rec_count = 1;
12878 }
12879
12880 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12881 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12882 return 0;
12883 }
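
/* Several branches above map an S register number onto the D register that
   gets recorded, since two consecutive S registers share one D register.
   A minimal standalone sketch of that mapping is shown here for
   illustration; arm_d0_regnum stands for the first D register number.  */

static int
arm_record_example_s_reg_to_d_regnum (unsigned s_reg, int arm_d0_regnum)
{
  /* S registers 2n and 2n+1 both live in D register n.  */
  return arm_d0_regnum + s_reg / 2;
}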
12884
12885 /* Record handler for arm/thumb mode VFP data processing instructions. */
12886
12887 static int
12888 arm_record_vfp_data_proc_insn (arm_insn_decode_record *arm_insn_r)
12889 {
12890 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12891 uint32_t record_buf[4];
12892 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12893 enum insn_types curr_insn_type = INSN_INV;
12894
12895 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12896 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12897 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12898 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12899 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12900 bit_d = bit (arm_insn_r->arm_insn, 22);
12901 /* Mask off the "D" bit. */
12902 opc1 = opc1 & ~0x04;
12903
12904 /* Handle VMLA, VMLS. */
12905 if (opc1 == 0x00)
12906 {
12907 if (bit (arm_insn_r->arm_insn, 10))
12908 {
12909 if (bit (arm_insn_r->arm_insn, 6))
12910 curr_insn_type = INSN_T0;
12911 else
12912 curr_insn_type = INSN_T1;
12913 }
12914 else
12915 {
12916 if (dp_op_sz)
12917 curr_insn_type = INSN_T1;
12918 else
12919 curr_insn_type = INSN_T2;
12920 }
12921 }
12922 /* Handle VNMLA, VNMLS, VNMUL. */
12923 else if (opc1 == 0x01)
12924 {
12925 if (dp_op_sz)
12926 curr_insn_type = INSN_T1;
12927 else
12928 curr_insn_type = INSN_T2;
12929 }
12930 /* Handle VMUL. */
12931 else if (opc1 == 0x02 && !(opc3 & 0x01))
12932 {
12933 if (bit (arm_insn_r->arm_insn, 10))
12934 {
12935 if (bit (arm_insn_r->arm_insn, 6))
12936 curr_insn_type = INSN_T0;
12937 else
12938 curr_insn_type = INSN_T1;
12939 }
12940 else
12941 {
12942 if (dp_op_sz)
12943 curr_insn_type = INSN_T1;
12944 else
12945 curr_insn_type = INSN_T2;
12946 }
12947 }
12948 /* Handle VADD, VSUB. */
12949 else if (opc1 == 0x03)
12950 {
12951 if (!bit (arm_insn_r->arm_insn, 9))
12952 {
12953 if (bit (arm_insn_r->arm_insn, 6))
12954 curr_insn_type = INSN_T0;
12955 else
12956 curr_insn_type = INSN_T1;
12957 }
12958 else
12959 {
12960 if (dp_op_sz)
12961 curr_insn_type = INSN_T1;
12962 else
12963 curr_insn_type = INSN_T2;
12964 }
12965 }
12966 /* Handle VDIV. */
12967 else if (opc1 == 0x08)
12968 {
12969 if (dp_op_sz)
12970 curr_insn_type = INSN_T1;
12971 else
12972 curr_insn_type = INSN_T2;
12973 }
12974 /* Handle all other vfp data processing instructions. */
12975 else if (opc1 == 0x0b)
12976 {
12977 /* Handle VMOV. */
12978 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12979 {
12980 if (bit (arm_insn_r->arm_insn, 4))
12981 {
12982 if (bit (arm_insn_r->arm_insn, 6))
12983 curr_insn_type = INSN_T0;
12984 else
12985 curr_insn_type = INSN_T1;
12986 }
12987 else
12988 {
12989 if (dp_op_sz)
12990 curr_insn_type = INSN_T1;
12991 else
12992 curr_insn_type = INSN_T2;
12993 }
12994 }
12995 /* Handle VNEG and VABS. */
12996 else if ((opc2 == 0x01 && opc3 == 0x01)
12997 || (opc2 == 0x00 && opc3 == 0x03))
12998 {
12999 if (!bit (arm_insn_r->arm_insn, 11))
13000 {
13001 if (bit (arm_insn_r->arm_insn, 6))
13002 curr_insn_type = INSN_T0;
13003 else
13004 curr_insn_type = INSN_T1;
13005 }
13006 else
13007 {
13008 if (dp_op_sz)
13009 curr_insn_type = INSN_T1;
13010 else
13011 curr_insn_type = INSN_T2;
13012 }
13013 }
13014 /* Handle VSQRT. */
13015 else if (opc2 == 0x01 && opc3 == 0x03)
13016 {
13017 if (dp_op_sz)
13018 curr_insn_type = INSN_T1;
13019 else
13020 curr_insn_type = INSN_T2;
13021 }
13022 /* Handle VCVT. */
13023 else if (opc2 == 0x07 && opc3 == 0x03)
13024 {
13025 if (!dp_op_sz)
13026 curr_insn_type = INSN_T1;
13027 else
13028 curr_insn_type = INSN_T2;
13029 }
13030 else if (opc3 & 0x01)
13031 {
13032 /* Handle VCVT. */
13033 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
13034 {
13035 if (!bit (arm_insn_r->arm_insn, 18))
13036 curr_insn_type = INSN_T2;
13037 else
13038 {
13039 if (dp_op_sz)
13040 curr_insn_type = INSN_T1;
13041 else
13042 curr_insn_type = INSN_T2;
13043 }
13044 }
13045 /* Handle VCVT. */
13046 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
13047 {
13048 if (dp_op_sz)
13049 curr_insn_type = INSN_T1;
13050 else
13051 curr_insn_type = INSN_T2;
13052 }
13053 /* Handle VCVTB, VCVTT. */
13054 else if ((opc2 & 0x0e) == 0x02)
13055 curr_insn_type = INSN_T2;
13056 /* Handle VCMP, VCMPE. */
13057 else if ((opc2 & 0x0e) == 0x04)
13058 curr_insn_type = INSN_T3;
13059 }
13060 }
13061
13062 switch (curr_insn_type)
13063 {
13064 case INSN_T0:
13065 reg_vd = reg_vd | (bit_d << 4);
13066 record_buf[0] = reg_vd + ARM_D0_REGNUM;
13067 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
13068 arm_insn_r->reg_rec_count = 2;
13069 break;
13070
13071 case INSN_T1:
13072 reg_vd = reg_vd | (bit_d << 4);
13073 record_buf[0] = reg_vd + ARM_D0_REGNUM;
13074 arm_insn_r->reg_rec_count = 1;
13075 break;
13076
13077 case INSN_T2:
13078 reg_vd = (reg_vd << 1) | bit_d;
13079 record_buf[0] = reg_vd + ARM_D0_REGNUM;
13080 arm_insn_r->reg_rec_count = 1;
13081 break;
13082
13083 case INSN_T3:
13084 record_buf[0] = ARM_FPSCR_REGNUM;
13085 arm_insn_r->reg_rec_count = 1;
13086 break;
13087
13088 default:
13089 gdb_assert_not_reached ("no decoding pattern found");
13090 break;
13091 }
13092
13093 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
13094 return 0;
13095 }
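
/* In the switch above the destination register number is reassembled from
   the Vd field and the D bit.  For reference, the architectural numbering is
   restated below (illustrative only, not called by the recorder): a
   double-precision destination is D[D:Vd], a single-precision destination is
   S[Vd:D].  */

static unsigned
arm_record_example_vfp_dest_number (unsigned reg_vd, unsigned bit_d,
				    int double_precision)
{
  if (double_precision)
    return (bit_d << 4) | reg_vd;	/* Dn, n = D:Vd.  */
  return (reg_vd << 1) | bit_d;		/* Sn, n = Vd:D.  */
}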
13096
13097 /* Handling opcode 110 insns. */
13098
13099 static int
13100 arm_record_asimd_vfp_coproc (arm_insn_decode_record *arm_insn_r)
13101 {
13102 uint32_t op1, op1_ebit, coproc;
13103
13104 coproc = bits (arm_insn_r->arm_insn, 8, 11);
13105 op1 = bits (arm_insn_r->arm_insn, 20, 25);
13106 op1_ebit = bit (arm_insn_r->arm_insn, 20);
13107
13108 if ((coproc & 0x0e) == 0x0a)
13109 {
13110 /* Handle extension register ld/st instructions. */
13111 if (!(op1 & 0x20))
13112 return arm_record_exreg_ld_st_insn (arm_insn_r);
13113
13114 /* 64-bit transfers between arm core and extension registers. */
13115 if ((op1 & 0x3e) == 0x04)
13116 return arm_record_exreg_ld_st_insn (arm_insn_r);
13117 }
13118 else
13119 {
13120 /* Handle coprocessor ld/st instructions. */
13121 if (!(op1 & 0x3a))
13122 {
13123 /* Store. */
13124 if (!op1_ebit)
13125 return arm_record_unsupported_insn (arm_insn_r);
13126 else
13127 /* Load. */
13128 return arm_record_unsupported_insn (arm_insn_r);
13129 }
13130
13131 /* Move to coprocessor from two arm core registers. */
13132 if (op1 == 0x4)
13133 return arm_record_unsupported_insn (arm_insn_r);
13134
13135 /* Move to two arm core registers from coprocessor. */
13136 if (op1 == 0x5)
13137 {
13138 uint32_t reg_t[2];
13139
13140 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
13141 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
13142 arm_insn_r->reg_rec_count = 2;
13143
13144 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
13145 return 0;
13146 }
13147 }
13148 return arm_record_unsupported_insn (arm_insn_r);
13149 }
13150
13151 /* Handling opcode 111 insns. */
13152
13153 static int
13154 arm_record_coproc_data_proc (arm_insn_decode_record *arm_insn_r)
13155 {
13156 uint32_t op, op1_ebit, coproc, bits_24_25;
13157 arm_gdbarch_tdep *tdep
13158 = gdbarch_tdep<arm_gdbarch_tdep> (arm_insn_r->gdbarch);
13159 struct regcache *reg_cache = arm_insn_r->regcache;
13160
13161 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
13162 coproc = bits (arm_insn_r->arm_insn, 8, 11);
13163 op1_ebit = bit (arm_insn_r->arm_insn, 20);
13164 op = bit (arm_insn_r->arm_insn, 4);
13165 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
13166
13167 /* Handle arm SWI/SVC system call instructions. */
13168 if (bits_24_25 == 0x3)
13169 {
13170 if (tdep->arm_syscall_record != NULL)
13171 {
13172 ULONGEST svc_operand, svc_number;
13173
13174 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
13175
13176 if (svc_operand) /* OABI. */
13177 svc_number = svc_operand - 0x900000;
13178 else /* EABI. */
13179 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
13180
13181 return tdep->arm_syscall_record (reg_cache, svc_number);
13182 }
13183 else
13184 {
13185 gdb_printf (gdb_stderr, _("no syscall record support\n"));
13186 return -1;
13187 }
13188 }
13189 else if (bits_24_25 == 0x02)
13190 {
13191 if (op)
13192 {
13193 if ((coproc & 0x0e) == 0x0a)
13194 {
13195 /* 8, 16, and 32-bit transfer */
13196 return arm_record_vdata_transfer_insn (arm_insn_r);
13197 }
13198 else
13199 {
13200 if (op1_ebit)
13201 {
13202 /* MRC, MRC2 */
13203 uint32_t record_buf[1];
13204
13205 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
13206 if (record_buf[0] == 15)
13207 record_buf[0] = ARM_PS_REGNUM;
13208
13209 arm_insn_r->reg_rec_count = 1;
13210 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
13211 record_buf);
13212 return 0;
13213 }
13214 else
13215 {
13216 /* MCR, MCR2 */
13217 return -1;
13218 }
13219 }
13220 }
13221 else
13222 {
13223 if ((coproc & 0x0e) == 0x0a)
13224 {
13225 /* VFP data-processing instructions. */
13226 return arm_record_vfp_data_proc_insn (arm_insn_r);
13227 }
13228 else
13229 {
13230 /* CDP, CDP2 */
13231 return -1;
13232 }
13233 }
13234 }
13235 else
13236 {
13237 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
13238
13239 if (op1 == 5)
13240 {
13241 if ((coproc & 0x0e) != 0x0a)
13242 {
13243 /* MRRC, MRRC2 */
13244 return -1;
13245 }
13246 }
13247 else if (op1 == 4 || op1 == 5)
13248 {
13249 if ((coproc & 0x0e) == 0x0a)
13250 {
13251 /* 64-bit transfers between ARM core and extension */
13252 return -1;
13253 }
13254 else if (op1 == 4)
13255 {
13256 /* MCRR, MCRR2 */
13257 return -1;
13258 }
13259 }
13260 else if (op1 == 0 || op1 == 1)
13261 {
13262 /* UNDEFINED */
13263 return -1;
13264 }
13265 else
13266 {
13267 if ((coproc & 0x0e) == 0x0a)
13268 {
13269 /* Extension register load/store */
13270 }
13271 else
13272 {
13273 /* STC, STC2, LDC, LDC2 */
13274 }
13275 return -1;
13276 }
13277 }
13278
13279 return -1;
13280 }
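
/* The SVC handling above distinguishes the old OABI encoding (syscall number
   embedded in the instruction, biased by 0x900000) from EABI (number in r7).
   A minimal standalone sketch of that selection follows; r7_value is
   whatever the caller read from register r7, and the function is
   illustrative only.  */

static unsigned long
arm_record_example_svc_number (uint32_t insn, unsigned long r7_value)
{
  uint32_t svc_operand = insn & 0x00ffffff;

  if (svc_operand != 0)
    return svc_operand - 0x900000;	/* OABI: number encoded in the insn.  */
  return r7_value;			/* EABI: number passed in r7.  */
}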
13281
13282 /* Handling opcode 000 insns. */
13283
13284 static int
13285 thumb_record_shift_add_sub (arm_insn_decode_record *thumb_insn_r)
13286 {
13287 uint32_t record_buf[8];
13288 uint32_t reg_src1 = 0;
13289
13290 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13291
13292 record_buf[0] = ARM_PS_REGNUM;
13293 record_buf[1] = reg_src1;
13294 thumb_insn_r->reg_rec_count = 2;
13295
13296 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13297
13298 return 0;
13299 }
13300
13301
13302 /* Handling opcode 001 insns. */
13303
13304 static int
13305 thumb_record_add_sub_cmp_mov (arm_insn_decode_record *thumb_insn_r)
13306 {
13307 uint32_t record_buf[8];
13308 uint32_t reg_src1 = 0;
13309
13310 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13311
13312 record_buf[0] = ARM_PS_REGNUM;
13313 record_buf[1] = reg_src1;
13314 thumb_insn_r->reg_rec_count = 2;
13315
13316 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13317
13318 return 0;
13319 }
13320
13321 /* Handling opcode 010 insns. */
13322
13323 static int
13324 thumb_record_ld_st_reg_offset (arm_insn_decode_record *thumb_insn_r)
13325 {
13326 struct regcache *reg_cache = thumb_insn_r->regcache;
13327 uint32_t record_buf[8], record_buf_mem[8];
13328
13329 uint32_t reg_src1 = 0, reg_src2 = 0;
13330 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
13331
13332 ULONGEST u_regval[2] = {0};
13333
13334 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
13335
13336 if (bit (thumb_insn_r->arm_insn, 12))
13337 {
13338 /* Handle load/store register offset. */
13339 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
13340
13341 if (in_inclusive_range (opB, 4U, 7U))
13342 {
13343 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
13344 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13345 record_buf[0] = reg_src1;
13346 thumb_insn_r->reg_rec_count = 1;
13347 }
13348 else if (in_inclusive_range (opB, 0U, 2U))
13349 {
13350 /* STR(2), STRB(2), STRH(2). */
13351 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13352 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
13353 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
13354 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
13355 if (0 == opB)
13356 record_buf_mem[0] = 4; /* STR (2). */
13357 else if (2 == opB)
13358 record_buf_mem[0] = 1; /* STRB (2). */
13359 else if (1 == opB)
13360 record_buf_mem[0] = 2; /* STRH (2). */
13361 record_buf_mem[1] = u_regval[0] + u_regval[1];
13362 thumb_insn_r->mem_rec_count = 1;
13363 }
13364 }
13365 else if (bit (thumb_insn_r->arm_insn, 11))
13366 {
13367 /* Handle load from literal pool. */
13368 /* LDR(3). */
13369 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13370 record_buf[0] = reg_src1;
13371 thumb_insn_r->reg_rec_count = 1;
13372 }
13373 else if (opcode1)
13374 {
13375 /* Special data instructions and branch and exchange */
13376 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
13377 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
13378 if ((3 == opcode2) && (!opcode3))
13379 {
13380 /* Branch with exchange. */
13381 record_buf[0] = ARM_PS_REGNUM;
13382 thumb_insn_r->reg_rec_count = 1;
13383 }
13384 else
13385 {
13386 /* Format 8; special data processing insns. */
13387 record_buf[0] = ARM_PS_REGNUM;
13388 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
13389 | bits (thumb_insn_r->arm_insn, 0, 2));
13390 thumb_insn_r->reg_rec_count = 2;
13391 }
13392 }
13393 else
13394 {
13395 /* Format 5; data processing insns. */
13396 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13397 if (bit (thumb_insn_r->arm_insn, 7))
13398 {
13399 reg_src1 = reg_src1 + 8;
13400 }
13401 record_buf[0] = ARM_PS_REGNUM;
13402 record_buf[1] = reg_src1;
13403 thumb_insn_r->reg_rec_count = 2;
13404 }
13405
13406 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13407 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13408 record_buf_mem);
13409
13410 return 0;
13411 }
13412
13413 /* Handling opcode 011 insns. */
13414
13415 static int
13416 thumb_record_ld_st_imm_offset (arm_insn_decode_record *thumb_insn_r)
13417 {
13418 struct regcache *reg_cache = thumb_insn_r->regcache;
13419 uint32_t record_buf[8], record_buf_mem[8];
13420
13421 uint32_t reg_src1 = 0;
13422 uint32_t opcode = 0, immed_5 = 0;
13423
13424 ULONGEST u_regval = 0;
13425
13426 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13427
13428 if (opcode)
13429 {
13430 /* LDR(1). */
13431 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13432 record_buf[0] = reg_src1;
13433 thumb_insn_r->reg_rec_count = 1;
13434 }
13435 else
13436 {
13437 /* STR(1). */
13438 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13439 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
13440 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13441 record_buf_mem[0] = 4;
13442 record_buf_mem[1] = u_regval + (immed_5 * 4);
13443 thumb_insn_r->mem_rec_count = 1;
13444 }
13445
13446 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13447 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13448 record_buf_mem);
13449
13450 return 0;
13451 }
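
/* STR(1) above records a 4-byte store at Rn plus a zero-extended 5-bit
   immediate scaled by the access size.  The address computation, restated
   as a standalone sketch for illustration only:  */

static uint32_t
thumb_record_example_str1_address (uint32_t rn_val, uint32_t immed_5)
{
  /* The immediate counts words for STR(1), so scale by 4.  */
  return rn_val + immed_5 * 4;
}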
13452
13453 /* Handling opcode 100 insns. */
13454
13455 static int
13456 thumb_record_ld_st_stack (arm_insn_decode_record *thumb_insn_r)
13457 {
13458 struct regcache *reg_cache = thumb_insn_r->regcache;
13459 uint32_t record_buf[8], record_buf_mem[8];
13460
13461 uint32_t reg_src1 = 0;
13462 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
13463
13464 ULONGEST u_regval = 0;
13465
13466 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13467
13468 if (3 == opcode)
13469 {
13470 /* LDR(4). */
13471 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13472 record_buf[0] = reg_src1;
13473 thumb_insn_r->reg_rec_count = 1;
13474 }
13475 else if (1 == opcode)
13476 {
13477 /* LDRH(1). */
13478 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13479 record_buf[0] = reg_src1;
13480 thumb_insn_r->reg_rec_count = 1;
13481 }
13482 else if (2 == opcode)
13483 {
13484 /* STR(3). */
13485 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
13486 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13487 record_buf_mem[0] = 4;
13488 record_buf_mem[1] = u_regval + (immed_8 * 4);
13489 thumb_insn_r->mem_rec_count = 1;
13490 }
13491 else if (0 == opcode)
13492 {
13493 /* STRH(1). */
13494 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
13495 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13496 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13497 record_buf_mem[0] = 2;
13498 record_buf_mem[1] = u_regval + (immed_5 * 2);
13499 thumb_insn_r->mem_rec_count = 1;
13500 }
13501
13502 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13503 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13504 record_buf_mem);
13505
13506 return 0;
13507 }
13508
13509 /* Handling opcode 101 insns. */
13510
13511 static int
13512 thumb_record_misc (arm_insn_decode_record *thumb_insn_r)
13513 {
13514 struct regcache *reg_cache = thumb_insn_r->regcache;
13515
13516 uint32_t opcode = 0;
13517 uint32_t register_bits = 0, register_count = 0;
13518 uint32_t index = 0, start_address = 0;
13519 uint32_t record_buf[24], record_buf_mem[48];
13520 uint32_t reg_src1;
13521
13522 ULONGEST u_regval = 0;
13523
13524 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13525
13526 if (opcode == 0 || opcode == 1)
13527 {
13528 /* ADR and ADD (SP plus immediate) */
13529
13530 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13531 record_buf[0] = reg_src1;
13532 thumb_insn_r->reg_rec_count = 1;
13533 }
13534 else
13535 {
13536 /* Miscellaneous 16-bit instructions */
13537 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
13538
13539 switch (opcode2)
13540 {
13541 case 6:
13542 /* SETEND and CPS */
13543 break;
13544 case 0:
13545 /* ADD/SUB (SP plus immediate) */
13546 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13547 record_buf[0] = ARM_SP_REGNUM;
13548 thumb_insn_r->reg_rec_count = 1;
13549 break;
13550 case 1: /* fall through */
13551 case 3: /* fall through */
13552 case 9: /* fall through */
13553 case 11:
13554 /* CBNZ, CBZ */
13555 break;
13556 case 2:
13557 /* SXTH, SXTB, UXTH, UXTB */
13558 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13559 thumb_insn_r->reg_rec_count = 1;
13560 break;
13561 case 5:
13562 /* PUSH with lr. */
13563 register_count++;
13564 [[fallthrough]];
13565 case 4:
13566 /* PUSH without lr. */
13567 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13568 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13569 while (register_bits)
13570 {
13571 if (register_bits & 0x00000001)
13572 register_count++;
13573 register_bits = register_bits >> 1;
13574 }
13575 start_address = u_regval - (4 * register_count);
13576 thumb_insn_r->mem_rec_count = register_count;
13577 while (register_count)
13578 {
13579 record_buf_mem[(register_count * 2) - 1] = start_address;
13580 record_buf_mem[(register_count * 2) - 2] = 4;
13581 start_address = start_address + 4;
13582 register_count--;
13583 }
13584 record_buf[0] = ARM_SP_REGNUM;
13585 thumb_insn_r->reg_rec_count = 1;
13586 break;
13587 case 10:
13588 /* REV, REV16, REVSH */
13589 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13590 thumb_insn_r->reg_rec_count = 1;
13591 break;
13592 case 12: /* fall through */
13593 case 13:
13594 /* POP. */
13595 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13596 while (register_bits)
13597 {
13598 if (register_bits & 0x00000001)
13599 record_buf[index++] = register_count;
13600 register_bits = register_bits >> 1;
13601 register_count++;
13602 }
13603 record_buf[index++] = ARM_PS_REGNUM;
13604 record_buf[index++] = ARM_SP_REGNUM;
13605 thumb_insn_r->reg_rec_count = index;
13606 break;
13607 case 0xe:
13608 /* BKPT insn. */
13609 /* Handle the enhanced software breakpoint insn, BKPT. */
13610 /* CPSR is changed so that execution continues in ARM state with normal
13611 interrupts disabled, entering abort mode. */
13612 /* The PC is set according to the high vector configuration. */
13613 /* When the user hits the breakpoint and then reverses execution, we need to
13614 go back to the previous CPSR and Program Counter. */
13615 record_buf[0] = ARM_PS_REGNUM;
13616 record_buf[1] = ARM_LR_REGNUM;
13617 thumb_insn_r->reg_rec_count = 2;
13618 /* We need to save SPSR value, which is not yet done. */
13619 gdb_printf (gdb_stderr,
13620 _("Process record does not support instruction "
13621 "0x%0x at address %s.\n"),
13622 thumb_insn_r->arm_insn,
13623 paddress (thumb_insn_r->gdbarch,
13624 thumb_insn_r->this_addr));
13625 return -1;
13626
13627 case 0xf:
13628 /* If-Then, and hints */
13629 break;
13630 default:
13631 return -1;
13632 };
13633 }
13634
13635 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13636 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13637 record_buf_mem);
13638
13639 return 0;
13640 }
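
/* The PUSH case above walks the register list twice: once to count the
   registers, then once to fill the memory record buffer with (length,
   address) pairs starting at SP minus 4 * count.  A minimal standalone
   sketch of that fill, under the same pair-layout assumption, is shown here
   for illustration; record_buf_mem must have room for 2 * count entries.  */

static unsigned
thumb_record_example_push_mem_records (uint32_t sp_val,
				       uint32_t register_bits,
				       uint32_t *record_buf_mem)
{
  unsigned register_count = 0;
  uint32_t bits_left = register_bits;
  uint32_t start_address;
  unsigned i;

  /* Count the registers named in the 8-bit register list.  */
  while (bits_left)
    {
      if (bits_left & 1)
	register_count++;
      bits_left >>= 1;
    }

  /* The lowest slot written is SP - 4 * count; each slot is 4 bytes.  */
  start_address = sp_val - 4 * register_count;
  for (i = 0; i < register_count; i++)
    {
      record_buf_mem[i * 2] = 4;			/* Slot length.  */
      record_buf_mem[i * 2 + 1] = start_address;	/* Slot address.  */
      start_address += 4;
    }
  return register_count;
}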
13641
13642 /* Handling opcode 110 insns. */
13643
13644 static int
13645 thumb_record_ldm_stm_swi (arm_insn_decode_record *thumb_insn_r)
13646 {
13647 arm_gdbarch_tdep *tdep
13648 = gdbarch_tdep<arm_gdbarch_tdep> (thumb_insn_r->gdbarch);
13649 struct regcache *reg_cache = thumb_insn_r->regcache;
13650
13651 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
13652 uint32_t reg_src1 = 0;
13653 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
13654 uint32_t index = 0, start_address = 0;
13655 uint32_t record_buf[24], record_buf_mem[48];
13656
13657 ULONGEST u_regval = 0;
13658
13659 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
13660 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
13661
13662 if (1 == opcode2)
13663 {
13664
13665 /* LDMIA. */
13666 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13667 /* Get Rn. */
13668 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13669 while (register_bits)
13670 {
13671 if (register_bits & 0x00000001)
13672 record_buf[index++] = register_count;
13673 register_bits = register_bits >> 1;
13674 register_count++;
13675 }
13676 record_buf[index++] = reg_src1;
13677 thumb_insn_r->reg_rec_count = index;
13678 }
13679 else if (0 == opcode2)
13680 {
13681 /* Handle STMIA. */
13682 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13683 /* Get Rn. */
13684 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13685 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13686 while (register_bits)
13687 {
13688 if (register_bits & 0x00000001)
13689 register_count++;
13690 register_bits = register_bits >> 1;
13691 }
13692 start_address = u_regval;
13693 thumb_insn_r->mem_rec_count = register_count;
13694 while (register_count)
13695 {
13696 record_buf_mem[(register_count * 2) - 1] = start_address;
13697 record_buf_mem[(register_count * 2) - 2] = 4;
13698 start_address = start_address + 4;
13699 register_count--;
13700 }
13701 }
13702 else if (0x1F == opcode1)
13703 {
13704 /* Handle arm syscall insn. */
13705 if (tdep->arm_syscall_record != NULL)
13706 {
13707 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
13708 ret = tdep->arm_syscall_record (reg_cache, u_regval);
13709 }
13710 else
13711 {
13712 gdb_printf (gdb_stderr, _("no syscall record support\n"));
13713 return -1;
13714 }
13715 }
13716
13717 /* B (1), conditional branch is automatically taken care of in
13718 process_record, as PC is saved there. */
13719
13720 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13721 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13722 record_buf_mem);
13723
13724 return ret;
13725 }
13726
13727 /* Handling opcode 111 insns. */
13728
13729 static int
13730 thumb_record_branch (arm_insn_decode_record *thumb_insn_r)
13731 {
13732 uint32_t record_buf[8];
13733 uint32_t bits_h = 0;
13734
13735 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
13736
13737 if (2 == bits_h || 3 == bits_h)
13738 {
13739 /* BL */
13740 record_buf[0] = ARM_LR_REGNUM;
13741 thumb_insn_r->reg_rec_count = 1;
13742 }
13743 else if (1 == bits_h)
13744 {
13745 /* BLX(1). */
13746 record_buf[0] = ARM_PS_REGNUM;
13747 record_buf[1] = ARM_LR_REGNUM;
13748 thumb_insn_r->reg_rec_count = 2;
13749 }
13750
13751 /* B(2) is automatically taken care of in process_record, as PC is
13752 saved there. */
13753
13754 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13755
13756 return 0;
13757 }
13758
13759 /* Handler for thumb2 load/store multiple instructions. */
13760
13761 static int
13762 thumb2_record_ld_st_multiple (arm_insn_decode_record *thumb2_insn_r)
13763 {
13764 struct regcache *reg_cache = thumb2_insn_r->regcache;
13765
13766 uint32_t reg_rn, op;
13767 uint32_t register_bits = 0, register_count = 0;
13768 uint32_t index = 0, start_address = 0;
13769 uint32_t record_buf[24], record_buf_mem[48];
13770
13771 ULONGEST u_regval = 0;
13772
13773 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13774 op = bits (thumb2_insn_r->arm_insn, 23, 24);
13775
13776 if (0 == op || 3 == op)
13777 {
13778 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13779 {
13780 /* Handle RFE instruction. */
13781 record_buf[0] = ARM_PS_REGNUM;
13782 thumb2_insn_r->reg_rec_count = 1;
13783 }
13784 else
13785 {
13786 /* Handle SRS instruction after reading banked SP. */
13787 return arm_record_unsupported_insn (thumb2_insn_r);
13788 }
13789 }
13790 else if (1 == op || 2 == op)
13791 {
13792 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13793 {
13794 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13795 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13796 while (register_bits)
13797 {
13798 if (register_bits & 0x00000001)
13799 record_buf[index++] = register_count;
13800
13801 register_count++;
13802 register_bits = register_bits >> 1;
13803 }
13804 record_buf[index++] = reg_rn;
13805 record_buf[index++] = ARM_PS_REGNUM;
13806 thumb2_insn_r->reg_rec_count = index;
13807 }
13808 else
13809 {
13810 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13811 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13812 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13813 while (register_bits)
13814 {
13815 if (register_bits & 0x00000001)
13816 register_count++;
13817
13818 register_bits = register_bits >> 1;
13819 }
13820
13821 if (1 == op)
13822 {
13823 /* Start address calculation for STM/STMIA/STMEA. */
13824 start_address = u_regval;
13825 }
13826 else if (2 == op)
13827 {
13828 /* Start address calculation for STMDB/STMFD. */
13829 start_address = u_regval - register_count * 4;
13830 }
13831
13832 thumb2_insn_r->mem_rec_count = register_count;
13833 while (register_count)
13834 {
13835 record_buf_mem[register_count * 2 - 1] = start_address;
13836 record_buf_mem[register_count * 2 - 2] = 4;
13837 start_address = start_address + 4;
13838 register_count--;
13839 }
13840 record_buf[0] = reg_rn;
13841 record_buf[1] = ARM_PS_REGNUM;
13842 thumb2_insn_r->reg_rec_count = 2;
13843 }
13844 }
13845
13846 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13847 record_buf_mem);
13848 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13849 record_buf);
13850 return ARM_RECORD_SUCCESS;
13851 }
13852
13853 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13854 instructions. */
13855
13856 static int
13857 thumb2_record_ld_st_dual_ex_tbb (arm_insn_decode_record *thumb2_insn_r)
13858 {
13859 struct regcache *reg_cache = thumb2_insn_r->regcache;
13860
13861 uint32_t reg_rd, reg_rn, offset_imm;
13862 uint32_t reg_dest1, reg_dest2;
13863 uint32_t address, offset_addr;
13864 uint32_t record_buf[8], record_buf_mem[8];
13865 uint32_t op1, op2, op3;
13866
13867 ULONGEST u_regval[2];
13868
13869 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13870 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13871 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
13872
13873 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13874 {
13875 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13876 {
13877 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13878 record_buf[0] = reg_dest1;
13879 record_buf[1] = ARM_PS_REGNUM;
13880 thumb2_insn_r->reg_rec_count = 2;
13881 }
13882
13883 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13884 {
13885 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13886 record_buf[2] = reg_dest2;
13887 thumb2_insn_r->reg_rec_count = 3;
13888 }
13889 }
13890 else
13891 {
13892 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13893 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13894
13895 if (0 == op1 && 0 == op2)
13896 {
13897 /* Handle STREX. */
13898 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13899 address = u_regval[0] + (offset_imm * 4);
13900 record_buf_mem[0] = 4;
13901 record_buf_mem[1] = address;
13902 thumb2_insn_r->mem_rec_count = 1;
13903 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13904 record_buf[0] = reg_rd;
13905 thumb2_insn_r->reg_rec_count = 1;
13906 }
13907 else if (1 == op1 && 0 == op2)
13908 {
13909 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13910 record_buf[0] = reg_rd;
13911 thumb2_insn_r->reg_rec_count = 1;
13912 address = u_regval[0];
13913 record_buf_mem[1] = address;
13914
13915 if (4 == op3)
13916 {
13917 /* Handle STREXB. */
13918 record_buf_mem[0] = 1;
13919 thumb2_insn_r->mem_rec_count = 1;
13920 }
13921 else if (5 == op3)
13922 {
13923 /* Handle STREXH. */
13924 record_buf_mem[0] = 2;
13925 thumb2_insn_r->mem_rec_count = 1;
13926 }
13927 else if (7 == op3)
13928 {
13929 /* Handle STREXD. */
13930 address = u_regval[0];
13931 record_buf_mem[0] = 4;
13932 record_buf_mem[2] = 4;
13933 record_buf_mem[3] = address + 4;
13934 thumb2_insn_r->mem_rec_count = 2;
13935 }
13936 }
13937 else
13938 {
13939 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13940
13941 if (bit (thumb2_insn_r->arm_insn, 24))
13942 {
13943 if (bit (thumb2_insn_r->arm_insn, 23))
13944 offset_addr = u_regval[0] + (offset_imm * 4);
13945 else
13946 offset_addr = u_regval[0] - (offset_imm * 4);
13947
13948 address = offset_addr;
13949 }
13950 else
13951 address = u_regval[0];
13952
13953 record_buf_mem[0] = 4;
13954 record_buf_mem[1] = address;
13955 record_buf_mem[2] = 4;
13956 record_buf_mem[3] = address + 4;
13957 thumb2_insn_r->mem_rec_count = 2;
13958 record_buf[0] = reg_rn;
13959 thumb2_insn_r->reg_rec_count = 1;
13960 }
13961 }
13962
13963 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13964 record_buf);
13965 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13966 record_buf_mem);
13967 return ARM_RECORD_SUCCESS;
13968 }
13969
13970 /* Handler for thumb2 data processing (shift register and modified immediate)
13971 instructions. */
13972
13973 static int
13974 thumb2_record_data_proc_sreg_mimm (arm_insn_decode_record *thumb2_insn_r)
13975 {
13976 uint32_t reg_rd, op;
13977 uint32_t record_buf[8];
13978
13979 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13980 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13981
13982 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13983 {
13984 record_buf[0] = ARM_PS_REGNUM;
13985 thumb2_insn_r->reg_rec_count = 1;
13986 }
13987 else
13988 {
13989 record_buf[0] = reg_rd;
13990 record_buf[1] = ARM_PS_REGNUM;
13991 thumb2_insn_r->reg_rec_count = 2;
13992 }
13993
13994 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13995 record_buf);
13996 return ARM_RECORD_SUCCESS;
13997 }
13998
13999 /* Generic handler for thumb2 instructions which affect the destination and
14000 PS registers. */
14001
14002 static int
14003 thumb2_record_ps_dest_generic (arm_insn_decode_record *thumb2_insn_r)
14004 {
14005 uint32_t reg_rd;
14006 uint32_t record_buf[8];
14007
14008 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
14009
14010 record_buf[0] = reg_rd;
14011 record_buf[1] = ARM_PS_REGNUM;
14012 thumb2_insn_r->reg_rec_count = 2;
14013
14014 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14015 record_buf);
14016 return ARM_RECORD_SUCCESS;
14017 }
14018
14019 /* Handler for thumb2 branch and miscellaneous control instructions. */
14020
14021 static int
14022 thumb2_record_branch_misc_cntrl (arm_insn_decode_record *thumb2_insn_r)
14023 {
14024 uint32_t op, op1, op2;
14025 uint32_t record_buf[8];
14026
14027 op = bits (thumb2_insn_r->arm_insn, 20, 26);
14028 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
14029 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
14030
14031 /* Handle MSR insn. */
14032 if (!(op1 & 0x2) && 0x38 == op)
14033 {
14034 if (!(op2 & 0x3))
14035 {
14036 /* CPSR is going to be changed. */
14037 record_buf[0] = ARM_PS_REGNUM;
14038 thumb2_insn_r->reg_rec_count = 1;
14039 }
14040 else
14041 {
14042 arm_record_unsupported_insn (thumb2_insn_r);
14043 return -1;
14044 }
14045 }
14046 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
14047 {
14048 /* BLX. */
14049 record_buf[0] = ARM_PS_REGNUM;
14050 record_buf[1] = ARM_LR_REGNUM;
14051 thumb2_insn_r->reg_rec_count = 2;
14052 }
14053
14054 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14055 record_buf);
14056 return ARM_RECORD_SUCCESS;
14057 }
14058
14059 /* Handler for thumb2 store single data item instructions. */
14060
14061 static int
14062 thumb2_record_str_single_data (arm_insn_decode_record *thumb2_insn_r)
14063 {
14064 struct regcache *reg_cache = thumb2_insn_r->regcache;
14065
14066 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
14067 uint32_t address, offset_addr;
14068 uint32_t record_buf[8], record_buf_mem[8];
14069 uint32_t op1, op2;
14070
14071 ULONGEST u_regval[2];
14072
14073 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
14074 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
14075 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14076 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
14077
14078 if (bit (thumb2_insn_r->arm_insn, 23))
14079 {
14080 /* T2 encoding. */
14081 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
14082 offset_addr = u_regval[0] + offset_imm;
14083 address = offset_addr;
14084 }
14085 else
14086 {
14087 /* T3 encoding. */
14088 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
14089 {
14090 /* Handle STRB (register). */
14091 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
14092 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
14093 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
14094 offset_addr = u_regval[1] << shift_imm;
14095 address = u_regval[0] + offset_addr;
14096 }
14097 else
14098 {
14099 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
14100 if (bit (thumb2_insn_r->arm_insn, 10))
14101 {
14102 if (bit (thumb2_insn_r->arm_insn, 9))
14103 offset_addr = u_regval[0] + offset_imm;
14104 else
14105 offset_addr = u_regval[0] - offset_imm;
14106
14107 address = offset_addr;
14108 }
14109 else
14110 address = u_regval[0];
14111 }
14112 }
14113
14114 switch (op1)
14115 {
14116 /* Store byte instructions. */
14117 case 4:
14118 case 0:
14119 record_buf_mem[0] = 1;
14120 break;
14121 /* Store half word instructions. */
14122 case 1:
14123 case 5:
14124 record_buf_mem[0] = 2;
14125 break;
14126 /* Store word instructions. */
14127 case 2:
14128 case 6:
14129 record_buf_mem[0] = 4;
14130 break;
14131
14132 default:
14133 gdb_assert_not_reached ("no decoding pattern found");
14134 break;
14135 }
14136
14137 record_buf_mem[1] = address;
14138 thumb2_insn_r->mem_rec_count = 1;
14139 record_buf[0] = reg_rn;
14140 thumb2_insn_r->reg_rec_count = 1;
14141
14142 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14143 record_buf);
14144 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
14145 record_buf_mem);
14146 return ARM_RECORD_SUCCESS;
14147 }
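
/* The store-single handling above selects between the T2 form (12-bit
   positive immediate) and the T3 form (8-bit immediate, with the P and U
   bits choosing offset, pre-indexed or post-indexed addressing).  A
   standalone sketch of the immediate address calculation, illustrative
   only:  */

static uint32_t
thumb2_record_example_store_address (uint32_t rn_val, int t2_encoding,
				     uint32_t imm, int p_bit, int u_bit)
{
  if (t2_encoding)
    return rn_val + imm;	/* T2: always Rn + imm12.  */
  if (p_bit)			/* T3 with index: add or subtract imm8.  */
    return u_bit ? rn_val + imm : rn_val - imm;
  return rn_val;		/* T3 post-indexed: access at Rn itself.  */
}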
14148
14149 /* Handler for thumb2 load memory hints instructions. */
14150
14151 static int
14152 thumb2_record_ld_mem_hints (arm_insn_decode_record *thumb2_insn_r)
14153 {
14154 uint32_t record_buf[8];
14155 uint32_t reg_rt, reg_rn;
14156
14157 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
14158 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14159
14160 if (ARM_PC_REGNUM != reg_rt)
14161 {
14162 record_buf[0] = reg_rt;
14163 record_buf[1] = reg_rn;
14164 record_buf[2] = ARM_PS_REGNUM;
14165 thumb2_insn_r->reg_rec_count = 3;
14166
14167 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14168 record_buf);
14169 return ARM_RECORD_SUCCESS;
14170 }
14171
14172 return ARM_RECORD_FAILURE;
14173 }
14174
14175 /* Handler for thumb2 load word instructions. */
14176
14177 static int
14178 thumb2_record_ld_word (arm_insn_decode_record *thumb2_insn_r)
14179 {
14180 uint32_t record_buf[8];
14181
14182 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
14183 record_buf[1] = ARM_PS_REGNUM;
14184 thumb2_insn_r->reg_rec_count = 2;
14185
14186 if ((thumb2_insn_r->arm_insn & 0xfff00900) == 0xf8500900)
14187 {
14188 /* Detected LDR(immediate), T4, with write-back bit set. Record Rn
14189 update. */
14190 record_buf[2] = bits (thumb2_insn_r->arm_insn, 16, 19);
14191 thumb2_insn_r->reg_rec_count++;
14192 }
14193
14194 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14195 record_buf);
14196 return ARM_RECORD_SUCCESS;
14197 }
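
/* The writeback test above matches LDR (immediate) T4 by masking the fixed
   opcode bits together with the W bit.  A standalone restatement of that
   predicate, for illustration only:  */

static int
thumb2_record_example_ldr_t4_writeback_p (uint32_t insn)
{
  /* 0xf8500900 selects the T4 encoding with the writeback (W) bit set;
     the register and immediate fields are masked out by 0xfff00900.  */
  return (insn & 0xfff00900) == 0xf8500900;
}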
14198
14199 /* Handler for thumb2 long multiply, long multiply accumulate, and
14200 divide instructions. */
14201
14202 static int
14203 thumb2_record_lmul_lmla_div (arm_insn_decode_record *thumb2_insn_r)
14204 {
14205 uint32_t opcode1 = 0, opcode2 = 0;
14206 uint32_t record_buf[8];
14207
14208 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
14209 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
14210
14211 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
14212 {
14213 /* Handle SMULL, UMULL, SMLAL and UMLAL, with or without flag
14214 setting (S), and related long multiply-accumulate insns. */
14215 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
14216 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
14217 record_buf[2] = ARM_PS_REGNUM;
14218 thumb2_insn_r->reg_rec_count = 3;
14219 }
14220 else if (1 == opcode1 || 3 == opcode2)
14221 {
14222 /* Handle SDIV and UDIV. */
14223 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
14224 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
14225 record_buf[2] = ARM_PS_REGNUM;
14226 thumb2_insn_r->reg_rec_count = 3;
14227 }
14228 else
14229 return ARM_RECORD_FAILURE;
14230
14231 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14232 record_buf);
14233 return ARM_RECORD_SUCCESS;
14234 }
14235
14236 /* Record handler for thumb32 coprocessor instructions. */
14237
14238 static int
14239 thumb2_record_coproc_insn (arm_insn_decode_record *thumb2_insn_r)
14240 {
14241 if (bit (thumb2_insn_r->arm_insn, 25))
14242 return arm_record_coproc_data_proc (thumb2_insn_r);
14243 else
14244 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
14245 }
14246
14247 /* Record handler for Advanced SIMD structure load/store instructions. */
14248
14249 static int
14250 thumb2_record_asimd_struct_ld_st (arm_insn_decode_record *thumb2_insn_r)
14251 {
14252 struct regcache *reg_cache = thumb2_insn_r->regcache;
14253 uint32_t l_bit, a_bit, b_bits;
14254 uint32_t record_buf[128], record_buf_mem[128];
14255 uint32_t reg_rn, reg_vd, address, f_elem;
14256 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
14257 uint8_t f_ebytes;
14258
14259 l_bit = bit (thumb2_insn_r->arm_insn, 21);
14260 a_bit = bit (thumb2_insn_r->arm_insn, 23);
14261 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
14262 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14263 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
14264 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
14265 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
14266 f_elem = 8 / f_ebytes;
14267
14268 if (!l_bit)
14269 {
14270 ULONGEST u_regval = 0;
14271 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
14272 address = u_regval;
14273
14274 if (!a_bit)
14275 {
14276 /* Handle VST1. */
14277 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
14278 {
14279 if (b_bits == 0x07)
14280 bf_regs = 1;
14281 else if (b_bits == 0x0a)
14282 bf_regs = 2;
14283 else if (b_bits == 0x06)
14284 bf_regs = 3;
14285 else if (b_bits == 0x02)
14286 bf_regs = 4;
14287 else
14288 bf_regs = 0;
14289
14290 for (index_r = 0; index_r < bf_regs; index_r++)
14291 {
14292 for (index_e = 0; index_e < f_elem; index_e++)
14293 {
14294 record_buf_mem[index_m++] = f_ebytes;
14295 record_buf_mem[index_m++] = address;
14296 address = address + f_ebytes;
14297 thumb2_insn_r->mem_rec_count += 1;
14298 }
14299 }
14300 }
14301 /* Handle VST2. */
14302 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
14303 {
14304 if (b_bits == 0x09 || b_bits == 0x08)
14305 bf_regs = 1;
14306 else if (b_bits == 0x03)
14307 bf_regs = 2;
14308 else
14309 bf_regs = 0;
14310
14311 for (index_r = 0; index_r < bf_regs; index_r++)
14312 for (index_e = 0; index_e < f_elem; index_e++)
14313 {
14314 for (loop_t = 0; loop_t < 2; loop_t++)
14315 {
14316 record_buf_mem[index_m++] = f_ebytes;
14317 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14318 thumb2_insn_r->mem_rec_count += 1;
14319 }
14320 address = address + (2 * f_ebytes);
14321 }
14322 }
14323 /* Handle VST3. */
14324 else if ((b_bits & 0x0e) == 0x04)
14325 {
14326 for (index_e = 0; index_e < f_elem; index_e++)
14327 {
14328 for (loop_t = 0; loop_t < 3; loop_t++)
14329 {
14330 record_buf_mem[index_m++] = f_ebytes;
14331 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14332 thumb2_insn_r->mem_rec_count += 1;
14333 }
14334 address = address + (3 * f_ebytes);
14335 }
14336 }
14337 /* Handle VST4. */
14338 else if (!(b_bits & 0x0e))
14339 {
14340 for (index_e = 0; index_e < f_elem; index_e++)
14341 {
14342 for (loop_t = 0; loop_t < 4; loop_t++)
14343 {
14344 record_buf_mem[index_m++] = f_ebytes;
14345 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14346 thumb2_insn_r->mem_rec_count += 1;
14347 }
14348 address = address + (4 * f_ebytes);
14349 }
14350 }
14351 }
14352 else
14353 {
14354 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
14355
14356 if (bft_size == 0x00)
14357 f_ebytes = 1;
14358 else if (bft_size == 0x01)
14359 f_ebytes = 2;
14360 else if (bft_size == 0x02)
14361 f_ebytes = 4;
14362 else
14363 f_ebytes = 0;
14364
14365 /* Handle VST1. */
14366 if (!(b_bits & 0x0b) || b_bits == 0x08)
14367 thumb2_insn_r->mem_rec_count = 1;
14368 /* Handle VST2. */
14369 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
14370 thumb2_insn_r->mem_rec_count = 2;
14371 /* Handle VST3. */
14372 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
14373 thumb2_insn_r->mem_rec_count = 3;
14374 /* Handle VST4. */
14375 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
14376 thumb2_insn_r->mem_rec_count = 4;
14377
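/* RECORD_BUF_MEM is consumed as (length, address) pairs by MEM_ALLOC, one
pair per memory record, which is why the other VSTn paths above increment
INDEX_M twice per record; the 2 * INDEX_M indexing below keeps the same
layout.  */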
14378 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
14379 {
14380 record_buf_mem[2 * index_m] = f_ebytes;
14381 record_buf_mem[2 * index_m + 1] = address + (index_m * f_ebytes);
14382 }
14383 }
14384 }
14385 else
14386 {
14387 if (!a_bit)
14388 {
14389 /* Handle VLD1. */
14390 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
14391 thumb2_insn_r->reg_rec_count = 1;
14392 /* Handle VLD2. */
14393 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
14394 thumb2_insn_r->reg_rec_count = 2;
14395 /* Handle VLD3. */
14396 else if ((b_bits & 0x0e) == 0x04)
14397 thumb2_insn_r->reg_rec_count = 3;
14398 /* Handle VLD4. */
14399 else if (!(b_bits & 0x0e))
14400 thumb2_insn_r->reg_rec_count = 4;
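/* Record the destination D registers here as well; a minimal addition
mirroring the single-lane case below, since REG_REC_COUNT alone does not
tell REG_ALLOC which registers the load modifies.  */
for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;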
14401 }
14402 else
14403 {
14404 /* Handle VLD1. */
14405 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
14406 thumb2_insn_r->reg_rec_count = 1;
14407 /* Handle VLD2. */
14408 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
14409 thumb2_insn_r->reg_rec_count = 2;
14410 /* Handle VLD3. */
14411 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
14412 thumb2_insn_r->reg_rec_count = 3;
14413 /* Handle VLD4. */
14414 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
14415 thumb2_insn_r->reg_rec_count = 4;
14416
14417 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
14418 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
14419 }
14420 }
14421
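/* If Rm (bits 0-3) is not 15, the base register is written back, so the
updated Rn must be recorded as well.  */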
14422 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
14423 {
14424 record_buf[thumb2_insn_r->reg_rec_count] = reg_rn;
14425 thumb2_insn_r->reg_rec_count += 1;
14426 }
14427
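/* Copy the collected register numbers and (length, address) pairs into the
instruction record.  */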
14428 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14429 record_buf);
14430 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
14431 record_buf_mem);
14432 return 0;
14433 }
14434
14435 /* Decode the Thumb-2 instruction type and invoke its record handler.  */
14436
14437 static unsigned int
14438 thumb2_record_decode_insn_handler (arm_insn_decode_record *thumb2_insn_r)
14439 {
14440 uint32_t op, op1, op2;
14441
14442 op = bit (thumb2_insn_r->arm_insn, 15);
14443 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
14444 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
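/* With the two halfwords already swapped by decode_insn, OP corresponds to
hw2[15], OP1 to hw1[12:11] and OP2 to hw1[10:4] of the 32-bit Thumb
encoding described in the ARM ARM.  */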
14445
14446 if (op1 == 0x01)
14447 {
14448 if (!(op2 & 0x64))
14449 {
14450 /* Load/store multiple instruction. */
14451 return thumb2_record_ld_st_multiple (thumb2_insn_r);
14452 }
14453 else if ((op2 & 0x64) == 0x4)
14454 {
14455 /* Load/store (dual/exclusive) and table branch instruction. */
14456 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
14457 }
14458 else if ((op2 & 0x60) == 0x20)
14459 {
14460 /* Data-processing (shifted register). */
14461 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
14462 }
14463 else if (op2 & 0x40)
14464 {
14465 /* Co-processor instructions. */
14466 return thumb2_record_coproc_insn (thumb2_insn_r);
14467 }
14468 }
14469 else if (op1 == 0x02)
14470 {
14471 if (op)
14472 {
14473 /* Branches and miscellaneous control instructions. */
14474 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
14475 }
14476 else if (op2 & 0x20)
14477 {
14478 /* Data-processing (plain binary immediate) instruction. */
14479 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14480 }
14481 else
14482 {
14483 /* Data-processing (modified immediate). */
14484 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
14485 }
14486 }
14487 else if (op1 == 0x03)
14488 {
14489 if (!(op2 & 0x71))
14490 {
14491 /* Store single data item. */
14492 return thumb2_record_str_single_data (thumb2_insn_r);
14493 }
14494 else if (!((op2 & 0x71) ^ 0x10))
14495 {
14496 /* Advanced SIMD or structure load/store instructions. */
14497 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
14498 }
14499 else if (!((op2 & 0x67) ^ 0x01))
14500 {
14501 /* Load byte, memory hints instruction. */
14502 return thumb2_record_ld_mem_hints (thumb2_insn_r);
14503 }
14504 else if (!((op2 & 0x67) ^ 0x03))
14505 {
14506 /* Load halfword, memory hints instruction. */
14507 return thumb2_record_ld_mem_hints (thumb2_insn_r);
14508 }
14509 else if (!((op2 & 0x67) ^ 0x05))
14510 {
14511 /* Load word instruction. */
14512 return thumb2_record_ld_word (thumb2_insn_r);
14513 }
14514 else if (!((op2 & 0x70) ^ 0x20))
14515 {
14516 /* Data-processing (register) instruction. */
14517 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14518 }
14519 else if (!((op2 & 0x78) ^ 0x30))
14520 {
14521 /* Multiply, multiply accumulate, abs diff instruction. */
14522 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14523 }
14524 else if (!((op2 & 0x78) ^ 0x38))
14525 {
14526 /* Long multiply, long multiply accumulate, and divide. */
14527 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
14528 }
14529 else if (op2 & 0x40)
14530 {
14531 /* Co-processor instructions. */
14532 return thumb2_record_coproc_insn (thumb2_insn_r);
14533 }
14534 }
14535
14536 return -1;
14537 }
14538
14539 namespace {
14540 /* Abstract instruction reader. */
14541
14542 class abstract_instruction_reader
14543 {
14544 public:
14545 /* Read one instruction of size LEN from address MEMADDR, using
14546 BYTE_ORDER endianness.  */
14547
14548 virtual ULONGEST read (CORE_ADDR memaddr, const size_t len,
14549 enum bfd_endian byte_order) = 0;
14550 };
14551
14552 /* Instruction reader from real target. */
14553
14554 class instruction_reader : public abstract_instruction_reader
14555 {
14556 public:
14557 ULONGEST read (CORE_ADDR memaddr, const size_t len,
14558 enum bfd_endian byte_order) override
14559 {
14560 return read_code_unsigned_integer (memaddr, len, byte_order);
14561 }
14562 };
14563
14564 } // namespace
14565
14566 typedef int (*sti_arm_hdl_fp_t) (arm_insn_decode_record*);
14567
14568 /* Decode an ARM/Thumb instruction depending on its condition codes and
14569 opcodes, and dispatch it to the matching record handler.  */
14570
14571 static int
14572 decode_insn (abstract_instruction_reader &reader,
14573 arm_insn_decode_record *arm_record,
14574 record_type_t record_type, uint32_t insn_size)
14575 {
14576
14577 /* Bits 25, 26 and 27 (counting from bit 0) decode the type of an ARM
14578 instruction.  */
14579 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
14580 {
14581 arm_record_data_proc_misc_ld_str, /* 000. */
14582 arm_record_data_proc_imm, /* 001. */
14583 arm_record_ld_st_imm_offset, /* 010. */
14584 arm_record_ld_st_reg_offset, /* 011. */
14585 arm_record_ld_st_multiple, /* 100. */
14586 arm_record_b_bl, /* 101. */
14587 arm_record_asimd_vfp_coproc, /* 110. */
14588 arm_record_coproc_data_proc /* 111. */
14589 };
14590
14591 /* Bits 13, 14 and 15 (counting from bit 0) decode the type of a Thumb
14592 instruction.  */
14593 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
14594 {
14595 thumb_record_shift_add_sub, /* 000. */
14596 thumb_record_add_sub_cmp_mov, /* 001. */
14597 thumb_record_ld_st_reg_offset, /* 010. */
14598 thumb_record_ld_st_imm_offset, /* 011. */
14599 thumb_record_ld_st_stack, /* 100. */
14600 thumb_record_misc, /* 101. */
14601 thumb_record_ldm_stm_swi, /* 110. */
14602 thumb_record_branch /* 111. */
14603 };
14604
14605 int ret = 0; /* Return value: -1 on failure, 0 on success.  */
14606 uint32_t insn_id = 0;
14607 enum bfd_endian code_endian
14608 = gdbarch_byte_order_for_code (arm_record->gdbarch);
14609 arm_record->arm_insn
14610 = reader.read (arm_record->this_addr, insn_size, code_endian);
14611
14612 if (ARM_RECORD == record_type)
14613 {
14614 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
14615 insn_id = bits (arm_record->arm_insn, 25, 27);
14616
14617 if (arm_record->cond == 0xf)
14618 ret = arm_record_extension_space (arm_record);
14619 else
14620 {
14621 /* The instruction is not in the unconditional (extension) space,
14622 so dispatch on bits 25-27.  */
14623 ret = arm_handle_insn[insn_id] (arm_record);
14624 }
14625 if (ret != ARM_RECORD_SUCCESS)
14626 {
14627 arm_record_unsupported_insn (arm_record);
14628 ret = -1;
14629 }
14630 }
14631 else if (THUMB_RECORD == record_type)
14632 {
14633 /* Thumb instructions do not have condition codes, so set COND to -1.  */
14634 arm_record->cond = -1;
14635 insn_id = bits (arm_record->arm_insn, 13, 15);
14636 ret = thumb_handle_insn[insn_id] (arm_record);
14637 if (ret != ARM_RECORD_SUCCESS)
14638 {
14639 arm_record_unsupported_insn (arm_record);
14640 ret = -1;
14641 }
14642 }
14643 else if (THUMB2_RECORD == record_type)
14644 {
14645 /* Thumb instructions do not have condition codes, so set COND to -1.  */
14646 arm_record->cond = -1;
14647
14648 /* Swap the two halfwords of the 32-bit Thumb instruction so that hw1 ends up in the upper 16 bits, as the decoder expects.  */
14649 arm_record->arm_insn
14650 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
14651
14652 ret = thumb2_record_decode_insn_handler (arm_record);
14653
14654 if (ret != ARM_RECORD_SUCCESS)
14655 {
14656 arm_record_unsupported_insn (arm_record);
14657 ret = -1;
14658 }
14659 }
14660 else
14661 {
14662 /* Unreachable: RECORD_TYPE must be one of the cases handled above.  */
14663 gdb_assert_not_reached ("not a valid instruction, could not decode");
14664 }
14665
14666 return ret;
14667 }
14668
14669 #if GDB_SELF_TEST
14670 namespace selftests {
14671
14672 /* Instruction reader class for selftests.
14673
14674 For 16-bit Thumb instructions, an array of uint16_t should be used.
14675
14676 For 32-bit Thumb instructions and regular 32-bit Arm instructions, an array
14677 of uint32_t should be used. */
14678
14679 template<typename T>
14680 class instruction_reader_selftest : public abstract_instruction_reader
14681 {
14682 public:
14683 template<size_t SIZE>
14684 instruction_reader_selftest (const T (&insns)[SIZE])
14685 : m_insns (insns), m_insns_size (SIZE)
14686 {}
14687
14688 ULONGEST read (CORE_ADDR memaddr, const size_t length,
14689 enum bfd_endian byte_order) override
14690 {
14691 SELF_CHECK (length == sizeof (T));
14692 SELF_CHECK (memaddr % sizeof (T) == 0);
14693 SELF_CHECK ((memaddr / sizeof (T)) < m_insns_size);
14694
14695 return m_insns[memaddr / sizeof (T)];
14696 }
14697
14698 private:
14699 const T *m_insns;
14700 const size_t m_insns_size;
14701 };
14702
14703 static void
14704 arm_record_test (void)
14705 {
14706 struct gdbarch_info info;
14707 info.bfd_arch_info = bfd_scan_arch ("arm");
14708
14709 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14710
14711 SELF_CHECK (gdbarch != NULL);
14712
14713 /* 16-bit Thumb instructions. */
14714 {
14715 arm_insn_decode_record arm_record;
14716
14717 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14718 arm_record.gdbarch = gdbarch;
14719
14720 /* Use the endian-free representation of the instructions here. The test
14721 will handle endianness conversions. */
14722 static const uint16_t insns[] = {
14723 /* db b2 uxtb r3, r3 */
14724 0xb2db,
14725 /* cd 58 ldr r5, [r1, r3] */
14726 0x58cd,
14727 };
14728
14729 instruction_reader_selftest<uint16_t> reader (insns);
14730 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14731 THUMB_INSN_SIZE_BYTES);
14732
14733 SELF_CHECK (ret == 0);
14734 SELF_CHECK (arm_record.mem_rec_count == 0);
14735 SELF_CHECK (arm_record.reg_rec_count == 1);
14736 SELF_CHECK (arm_record.arm_regs[0] == 3);
14737
14738 arm_record.this_addr += 2;
14739 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14740 THUMB_INSN_SIZE_BYTES);
14741
14742 SELF_CHECK (ret == 0);
14743 SELF_CHECK (arm_record.mem_rec_count == 0);
14744 SELF_CHECK (arm_record.reg_rec_count == 1);
14745 SELF_CHECK (arm_record.arm_regs[0] == 5);
14746 }
14747
14748 /* 32-bit Thumb-2 instructions. */
14749 {
14750 arm_insn_decode_record arm_record;
14751
14752 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14753 arm_record.gdbarch = gdbarch;
14754
14755 /* Use the endian-free representation of the instruction here. The test
14756 will handle endianness conversions. */
14757 static const uint32_t insns[] = {
14758 /* mrc 15, 0, r7, cr13, cr0, {3} */
14759 0x7f70ee1d,
14760 };
14761
14762 instruction_reader_selftest<uint32_t> reader (insns);
14763 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14764 THUMB2_INSN_SIZE_BYTES);
14765
14766 SELF_CHECK (ret == 0);
14767 SELF_CHECK (arm_record.mem_rec_count == 0);
14768 SELF_CHECK (arm_record.reg_rec_count == 1);
14769 SELF_CHECK (arm_record.arm_regs[0] == 7);
14770 }
14771
14772 /* 32-bit instructions. */
14773 {
14774 arm_insn_decode_record arm_record;
14775
14776 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14777 arm_record.gdbarch = gdbarch;
14778
14779 /* Use the endian-free representation of the instruction here. The test
14780 will handle endianness conversions. */
14781 static const uint32_t insns[] = {
14782 /* mov r5, r0 */
14783 0xe1a05000,
14784 };
14785
14786 instruction_reader_selftest<uint32_t> reader (insns);
14787 int ret = decode_insn (reader, &arm_record, ARM_RECORD,
14788 ARM_INSN_SIZE_BYTES);
14789
14790 SELF_CHECK (ret == 0);
14791 }
14792 }
14793
14794 /* Instruction reader from manually cooked instruction sequences. */
14795
14796 class test_arm_instruction_reader : public arm_instruction_reader
14797 {
14798 public:
14799 explicit test_arm_instruction_reader (gdb::array_view<const uint32_t> insns)
14800 : m_insns (insns)
14801 {}
14802
14803 uint32_t read (CORE_ADDR memaddr, enum bfd_endian byte_order) const override
14804 {
14805 SELF_CHECK (memaddr % 4 == 0);
14806 SELF_CHECK (memaddr / 4 < m_insns.size ());
14807
14808 return m_insns[memaddr / 4];
14809 }
14810
14811 private:
14812 const gdb::array_view<const uint32_t> m_insns;
14813 };
14814
14815 static void
14816 arm_analyze_prologue_test ()
14817 {
14818 for (bfd_endian endianness : {BFD_ENDIAN_LITTLE, BFD_ENDIAN_BIG})
14819 {
14820 struct gdbarch_info info;
14821 info.byte_order = endianness;
14822 info.byte_order_for_code = endianness;
14823 info.bfd_arch_info = bfd_scan_arch ("arm");
14824
14825 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14826
14827 SELF_CHECK (gdbarch != NULL);
14828
14829 /* The "sub" instruction contains an immediate value rotate count of 0,
14830 which resulted in a 32-bit shift of a 32-bit value, caught by
14831 UBSan. */
14832 const uint32_t insns[] = {
14833 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */
14834 0xe1a05000, /* mov r5, r0 */
14835 0xe5903020, /* ldr r3, [r0, #32] */
14836 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */
14837 };
14838
14839 test_arm_instruction_reader mem_reader (insns);
14840 arm_prologue_cache cache;
14841 arm_cache_init (&cache, gdbarch);
14842
14843 arm_analyze_prologue (gdbarch, 0, sizeof (insns) - 1, &cache, mem_reader);
14844 }
14845 }
14846
14847 } /* namespace selftests */
14848 #endif /* GDB_SELF_TEST */
14849
14850 /* Cleans up local record registers and memory allocations. */
14851
14852 static void
14853 deallocate_reg_mem (arm_insn_decode_record *record)
14854 {
14855 xfree (record->arm_regs);
14856 xfree (record->arm_mems);
14857 }
14858
14859
14860 /* Parse the current instruction and record the values of the registers and
14861 memory that will be changed by it to "record_arch_list".
14862 Return -1 if something goes wrong.  */
14863
14864 int
14865 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
14866 CORE_ADDR insn_addr)
14867 {
14868
14869 uint32_t no_of_rec = 0;
14870 int ret = 0; /* Return value: -1 on record failure, 0 on success.  */
14871 ULONGEST t_bit = 0, insn_id = 0;
14872
14873 ULONGEST u_regval = 0;
14874
14875 arm_insn_decode_record arm_record;
14876
14877 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14878 arm_record.regcache = regcache;
14879 arm_record.this_addr = insn_addr;
14880 arm_record.gdbarch = gdbarch;
14881
14882
14883 if (record_debug > 1)
14884 {
14885 gdb_printf (gdb_stdlog, "Process record: arm_process_record "
14886 "addr = %s\n",
14887 paddress (gdbarch, arm_record.this_addr));
14888 }
14889
14890 instruction_reader reader;
14891 enum bfd_endian code_endian
14892 = gdbarch_byte_order_for_code (arm_record.gdbarch);
14893 arm_record.arm_insn
14894 = reader.read (arm_record.this_addr, 2, code_endian);
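/* Only the first halfword is read here; for Thumb mode its top five bits
are enough to tell a 16-bit encoding from a 32-bit one.  decode_insn
re-reads the instruction at its full size.  */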
14895
14896 /* Check whether the instruction is a Thumb or an ARM one.  */
14897
14898 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
14899 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
14900
14901
14902 if (!(u_regval & t_bit))
14903 {
14904 /* We are decoding arm insn. */
14905 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
14906 }
14907 else
14908 {
14909 insn_id = bits (arm_record.arm_insn, 11, 15);
14910 /* Is it a 32-bit Thumb-2 instruction?  The first halfword of a 32-bit encoding has 0b11101, 0b11110 or 0b11111 in bits 11-15.  */
14911 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
14912 {
14913 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14914 THUMB2_INSN_SIZE_BYTES);
14915 }
14916 else
14917 {
14918 /* We are decoding thumb insn. */
14919 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14920 THUMB_INSN_SIZE_BYTES);
14921 }
14922 }
14923
14924 if (0 == ret)
14925 {
14926 /* Record registers. */
14927 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
14928 if (arm_record.arm_regs)
14929 {
14930 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
14931 {
14932 if (record_full_arch_list_add_reg
14933 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
14934 ret = -1;
14935 }
14936 }
14937 /* Record memories. */
14938 if (arm_record.arm_mems)
14939 {
14940 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
14941 {
14942 if (record_full_arch_list_add_mem
14943 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
14944 arm_record.arm_mems[no_of_rec].len))
14945 ret = -1;
14946 }
14947 }
14948
14949 if (record_full_arch_list_add_end ())
14950 ret = -1;
14951 }
14952
14953
14954 deallocate_reg_mem (&arm_record);
14955
14956 return ret;
14957 }
14958
14959 /* See arm-tdep.h. */
14960
14961 const target_desc *
14962 arm_read_description (arm_fp_type fp_type, bool tls)
14963 {
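/* Target descriptions are created lazily and cached in TDESC_ARM_LIST,
keyed by floating-point type and whether a TLS register is included.  */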
14964 struct target_desc *tdesc = tdesc_arm_list[fp_type][tls];
14965
14966 if (tdesc == nullptr)
14967 {
14968 tdesc = arm_create_target_description (fp_type, tls);
14969 tdesc_arm_list[fp_type][tls] = tdesc;
14970 }
14971
14972 return tdesc;
14973 }
14974
14975 /* See arm-tdep.h. */
14976
14977 const target_desc *
14978 arm_read_mprofile_description (arm_m_profile_type m_type)
14979 {
14980 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
14981
14982 if (tdesc == nullptr)
14983 {
14984 tdesc = arm_create_mprofile_target_description (m_type);
14985 tdesc_arm_mprofile_list[m_type] = tdesc;
14986 }
14987
14988 return tdesc;
14989 }