1/* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2023 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20#include "defs.h"
21
22#include <ctype.h> /* XXX for isupper (). */
23
24#include "frame.h"
25#include "language.h"
26#include "inferior.h"
27#include "infrun.h"
28#include "gdbcmd.h"
29#include "gdbcore.h"
30#include "dis-asm.h" /* For register styles. */
31#include "disasm.h"
32#include "regcache.h"
33#include "reggroups.h"
34#include "target-float.h"
35#include "value.h"
36#include "arch-utils.h"
37#include "osabi.h"
38#include "frame-unwind.h"
39#include "frame-base.h"
40#include "trad-frame.h"
41#include "objfiles.h"
42#include "dwarf2.h"
43#include "dwarf2/frame.h"
44#include "gdbtypes.h"
45#include "prologue-value.h"
46#include "remote.h"
47#include "target-descriptions.h"
48#include "user-regs.h"
49#include "observable.h"
50#include "count-one-bits.h"
51
52#include "arch/arm.h"
53#include "arch/arm-get-next-pcs.h"
54#include "arm-tdep.h"
55#include "sim/sim-arm.h"
56
57#include "elf-bfd.h"
58#include "coff/internal.h"
59#include "elf/arm.h"
60
61#include "record.h"
62#include "record-full.h"
63#include <algorithm>
64
65#include "producer.h"
66
67#if GDB_SELF_TEST
68#include "gdbsupport/selftest.h"
69#endif
70
71static bool arm_debug;
72
73/* Print an "arm" debug statement. */
74
75#define arm_debug_printf(fmt, ...) \
76 debug_prefixed_printf_cond (arm_debug, "arm", fmt, ##__VA_ARGS__)
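/* Illustrative usage (taken from later in this file):

     arm_debug_printf ("Prologue scan stopped at %s",
                       paddress (gdbarch, start));

   Output is only emitted when the "arm" debug flag is enabled,
   presumably via "set debug arm on".  */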
77
78/* Macros for setting and testing a bit in a minimal symbol that marks
 79 it as a Thumb function. The MSB of the minimal symbol's "info" field
80 is used for this purpose.
81
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
84
85#define MSYMBOL_SET_SPECIAL(msym) \
86 (msym)->set_target_flag_1 (true)
87
88#define MSYMBOL_IS_SPECIAL(msym) \
89 (msym)->target_flag_1 ()
90
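/* A mapping symbol entry.  VALUE is the symbol's offset within its
   section, and TYPE is the character following the '$' in the symbol
   name ('a' for ARM code, 't' for Thumb code, 'd' for data).  */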
91struct arm_mapping_symbol
92{
93 CORE_ADDR value;
94 char type;
95
96 bool operator< (const arm_mapping_symbol &other) const
97 { return this->value < other.value; }
98};
99
100typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
101
102struct arm_per_bfd
103{
104 explicit arm_per_bfd (size_t num_sections)
105 : section_maps (new arm_mapping_symbol_vec[num_sections]),
106 section_maps_sorted (new bool[num_sections] ())
107 {}
108
109 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
110
111 /* Information about mapping symbols ($a, $d, $t) in the objfile.
112
 113 The format is an array of vectors of arm_mapping_symbols; there is one
 114 vector for each section of the objfile (the array is indexed by BFD section
 115 index).
116
117 For each section, the vector of arm_mapping_symbol is sorted by
118 symbol value (address). */
119 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
120
 121 /* For each corresponding element of section_maps above, whether that
 122 vector has been sorted. */
123 std::unique_ptr<bool[]> section_maps_sorted;
124};
125
126/* Per-bfd data used for mapping symbols. */
127static const registry<bfd>::key<arm_per_bfd> arm_bfd_data_key;
128
129/* The list of available "set arm ..." and "show arm ..." commands. */
130static struct cmd_list_element *setarmcmdlist = NULL;
131static struct cmd_list_element *showarmcmdlist = NULL;
132
133/* The type of floating-point to use. Keep this in sync with enum
134 arm_float_model, and the help string in _initialize_arm_tdep. */
135static const char *const fp_model_strings[] =
136{
137 "auto",
138 "softfpa",
139 "fpa",
140 "softvfp",
141 "vfp",
142 NULL
143};
144
145/* A variable that can be configured by the user. */
146static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
147static const char *current_fp_model = "auto";
148
149/* The ABI to use. Keep this in sync with arm_abi_kind. */
150static const char *const arm_abi_strings[] =
151{
152 "auto",
153 "APCS",
154 "AAPCS",
155 NULL
156};
157
158/* A variable that can be configured by the user. */
159static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
160static const char *arm_abi_string = "auto";
161
162/* The execution mode to assume. */
163static const char *const arm_mode_strings[] =
164 {
165 "auto",
166 "arm",
167 "thumb",
168 NULL
169 };
170
171static const char *arm_fallback_mode_string = "auto";
172static const char *arm_force_mode_string = "auto";
173
174/* The standard register names, and all the valid aliases for them. Note
175 that `fp', `sp' and `pc' are not added in this alias list, because they
176 have been added as builtin user registers in
177 std-regs.c:_initialize_frame_reg. */
178static const struct
179{
180 const char *name;
181 int regnum;
182} arm_register_aliases[] = {
183 /* Basic register numbers. */
184 { "r0", 0 },
185 { "r1", 1 },
186 { "r2", 2 },
187 { "r3", 3 },
188 { "r4", 4 },
189 { "r5", 5 },
190 { "r6", 6 },
191 { "r7", 7 },
192 { "r8", 8 },
193 { "r9", 9 },
194 { "r10", 10 },
195 { "r11", 11 },
196 { "r12", 12 },
197 { "r13", 13 },
198 { "r14", 14 },
199 { "r15", 15 },
200 /* Synonyms (argument and variable registers). */
201 { "a1", 0 },
202 { "a2", 1 },
203 { "a3", 2 },
204 { "a4", 3 },
205 { "v1", 4 },
206 { "v2", 5 },
207 { "v3", 6 },
208 { "v4", 7 },
209 { "v5", 8 },
210 { "v6", 9 },
211 { "v7", 10 },
212 { "v8", 11 },
213 /* Other platform-specific names for r9. */
214 { "sb", 9 },
215 { "tr", 9 },
216 /* Special names. */
217 { "ip", 12 },
218 { "lr", 14 },
219 /* Names used by GCC (not listed in the ARM EABI). */
220 { "sl", 10 },
221 /* A special name from the older ATPCS. */
222 { "wr", 7 },
223};
224
225static const char *const arm_register_names[] =
226{"r0", "r1", "r2", "r3", /* 0 1 2 3 */
227 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
228 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
229 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
230 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
231 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
232 "fps", "cpsr" }; /* 24 25 */
233
234/* Holds the current set of options to be passed to the disassembler. */
235static char *arm_disassembler_options;
236
237/* Valid register name styles. */
238static const char **valid_disassembly_styles;
239
240/* Disassembly style to use. Default to "std" register names. */
241static const char *disassembly_style;
242
243/* All possible arm target descriptors. */
244static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID][2];
245static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
246
247/* This is used to keep the bfd arch_info in sync with the disassembly
248 style. */
249static void set_disassembly_style_sfunc (const char *, int,
250 struct cmd_list_element *);
251static void show_disassembly_style_sfunc (struct ui_file *, int,
252 struct cmd_list_element *,
253 const char *);
254
255static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
256 readable_regcache *regcache,
257 int regnum, gdb_byte *buf);
258static void arm_neon_quad_write (struct gdbarch *gdbarch,
259 struct regcache *regcache,
260 int regnum, const gdb_byte *buf);
261
262static CORE_ADDR
263 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
264
265
266/* get_next_pcs operations. */
267static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
268 arm_get_next_pcs_read_memory_unsigned_integer,
269 arm_get_next_pcs_syscall_next_pc,
270 arm_get_next_pcs_addr_bits_remove,
271 arm_get_next_pcs_is_thumb,
272 NULL,
273};
274
275struct arm_prologue_cache
276{
277 /* The stack pointer at the time this frame was created; i.e. the
278 caller's stack pointer when this function was called. It is used
279 to identify this frame. */
280 CORE_ADDR sp;
281
282 /* Additional stack pointers used by M-profile with Security extension. */
283 /* Use msp_s / psp_s to hold the values of msp / psp when there is
284 no Security extension. */
285 CORE_ADDR msp_s;
286 CORE_ADDR msp_ns;
287 CORE_ADDR psp_s;
288 CORE_ADDR psp_ns;
289
290 /* Active stack pointer. */
291 int active_sp_regnum;
292 int active_msp_regnum;
293 int active_psp_regnum;
294
295 /* The frame base for this frame is just prev_sp - frame size.
296 FRAMESIZE is the distance from the frame pointer to the
297 initial stack pointer. */
298
299 int framesize;
300
301 /* The register used to hold the frame pointer for this frame. */
302 int framereg;
303
304 /* True if the return address is signed, false otherwise. */
305 gdb::optional<bool> ra_signed_state;
306
307 /* Saved register offsets. */
308 trad_frame_saved_reg *saved_regs;
309
310 arm_prologue_cache() = default;
311};
312
313
314/* Reconstruct T bit in program status register from LR value. */
315
316static inline ULONGEST
317reconstruct_t_bit(struct gdbarch *gdbarch, CORE_ADDR lr, ULONGEST psr)
318{
319 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
320 if (IS_THUMB_ADDR (lr))
321 psr |= t_bit;
322 else
323 psr &= ~t_bit;
324
325 return psr;
326}
327
328/* Initialize CACHE fields for which zero is not adequate (CACHE is
329 expected to have been ZALLOC'ed before calling this function). */
330
331static void
332arm_cache_init (struct arm_prologue_cache *cache, struct gdbarch *gdbarch)
333{
334 cache->active_sp_regnum = ARM_SP_REGNUM;
335
336 cache->saved_regs = trad_frame_alloc_saved_regs (gdbarch);
337}
338
339/* Similar to the previous function, but extracts GDBARCH from FRAME. */
340
341static void
342arm_cache_init (struct arm_prologue_cache *cache, frame_info_ptr frame)
343{
344 struct gdbarch *gdbarch = get_frame_arch (frame);
345 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
346
347 arm_cache_init (cache, gdbarch);
348 cache->sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
349
350 if (tdep->have_sec_ext)
351 {
352 const CORE_ADDR msp_val
353 = get_frame_register_unsigned (frame, tdep->m_profile_msp_regnum);
354 const CORE_ADDR psp_val
355 = get_frame_register_unsigned (frame, tdep->m_profile_psp_regnum);
356
357 cache->msp_s
358 = get_frame_register_unsigned (frame, tdep->m_profile_msp_s_regnum);
359 cache->msp_ns
360 = get_frame_register_unsigned (frame, tdep->m_profile_msp_ns_regnum);
361 cache->psp_s
362 = get_frame_register_unsigned (frame, tdep->m_profile_psp_s_regnum);
363 cache->psp_ns
364 = get_frame_register_unsigned (frame, tdep->m_profile_psp_ns_regnum);
365
 366 /* Identify what msp is an alias for (msp_s or msp_ns). */
367 if (msp_val == cache->msp_s)
368 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
369 else if (msp_val == cache->msp_ns)
370 cache->active_msp_regnum = tdep->m_profile_msp_ns_regnum;
371 else
372 {
373 warning (_("Invalid state, unable to determine msp alias, assuming "
374 "msp_s."));
375 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
376 }
377
 378 /* Identify what psp is an alias for (psp_s or psp_ns). */
379 if (psp_val == cache->psp_s)
380 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
381 else if (psp_val == cache->psp_ns)
382 cache->active_psp_regnum = tdep->m_profile_psp_ns_regnum;
383 else
384 {
385 warning (_("Invalid state, unable to determine psp alias, assuming "
386 "psp_s."));
387 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
388 }
389
 390 /* Identify what sp is an alias for (msp_s, msp_ns, psp_s or psp_ns). */
391 if (msp_val == cache->sp)
392 cache->active_sp_regnum = cache->active_msp_regnum;
393 else if (psp_val == cache->sp)
394 cache->active_sp_regnum = cache->active_psp_regnum;
395 else
396 {
397 warning (_("Invalid state, unable to determine sp alias, assuming "
398 "msp."));
399 cache->active_sp_regnum = cache->active_msp_regnum;
400 }
401 }
402 else if (tdep->is_m)
403 {
404 cache->msp_s
405 = get_frame_register_unsigned (frame, tdep->m_profile_msp_regnum);
406 cache->psp_s
407 = get_frame_register_unsigned (frame, tdep->m_profile_psp_regnum);
408
 409 /* Identify what sp is an alias for (msp or psp). */
410 if (cache->msp_s == cache->sp)
411 cache->active_sp_regnum = tdep->m_profile_msp_regnum;
412 else if (cache->psp_s == cache->sp)
413 cache->active_sp_regnum = tdep->m_profile_psp_regnum;
414 else
415 {
416 warning (_("Invalid state, unable to determine sp alias, assuming "
417 "msp."));
418 cache->active_sp_regnum = tdep->m_profile_msp_regnum;
419 }
420 }
421 else
422 {
423 cache->msp_s
424 = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
425
426 cache->active_sp_regnum = ARM_SP_REGNUM;
427 }
428}
429
430/* Return the requested stack pointer value (in REGNUM), taking into
431 account whether we have a Security extension or an M-profile
432 CPU. */
433
434static CORE_ADDR
435arm_cache_get_sp_register (struct arm_prologue_cache *cache,
436 arm_gdbarch_tdep *tdep, int regnum)
437{
438 if (tdep->have_sec_ext)
439 {
440 if (regnum == tdep->m_profile_msp_s_regnum)
441 return cache->msp_s;
442 if (regnum == tdep->m_profile_msp_ns_regnum)
443 return cache->msp_ns;
444 if (regnum == tdep->m_profile_psp_s_regnum)
445 return cache->psp_s;
446 if (regnum == tdep->m_profile_psp_ns_regnum)
447 return cache->psp_ns;
448 if (regnum == tdep->m_profile_msp_regnum)
449 return arm_cache_get_sp_register (cache, tdep, cache->active_msp_regnum);
450 if (regnum == tdep->m_profile_psp_regnum)
451 return arm_cache_get_sp_register (cache, tdep, cache->active_psp_regnum);
452 if (regnum == ARM_SP_REGNUM)
453 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
454 }
455 else if (tdep->is_m)
456 {
457 if (regnum == tdep->m_profile_msp_regnum)
458 return cache->msp_s;
459 if (regnum == tdep->m_profile_psp_regnum)
460 return cache->psp_s;
461 if (regnum == ARM_SP_REGNUM)
462 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
463 }
464 else if (regnum == ARM_SP_REGNUM)
465 return cache->sp;
466
467 gdb_assert_not_reached ("Invalid SP selection");
468}
469
470/* Return the previous stack address, depending on which SP register
471 is active. */
472
473static CORE_ADDR
474arm_cache_get_prev_sp_value (struct arm_prologue_cache *cache, arm_gdbarch_tdep *tdep)
475{
476 CORE_ADDR val = arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
477 return val;
478}
479
480/* Set the active stack pointer to VAL. */
481
482static void
483arm_cache_set_active_sp_value (struct arm_prologue_cache *cache,
484 arm_gdbarch_tdep *tdep, CORE_ADDR val)
485{
486 if (tdep->have_sec_ext)
487 {
488 if (cache->active_sp_regnum == tdep->m_profile_msp_s_regnum)
489 cache->msp_s = val;
490 else if (cache->active_sp_regnum == tdep->m_profile_msp_ns_regnum)
491 cache->msp_ns = val;
492 else if (cache->active_sp_regnum == tdep->m_profile_psp_s_regnum)
493 cache->psp_s = val;
494 else if (cache->active_sp_regnum == tdep->m_profile_psp_ns_regnum)
495 cache->psp_ns = val;
496
497 return;
498 }
499 else if (tdep->is_m)
500 {
501 if (cache->active_sp_regnum == tdep->m_profile_msp_regnum)
502 cache->msp_s = val;
503 else if (cache->active_sp_regnum == tdep->m_profile_psp_regnum)
504 cache->psp_s = val;
505
506 return;
507 }
508 else if (cache->active_sp_regnum == ARM_SP_REGNUM)
509 {
510 cache->sp = val;
511 return;
512 }
513
514 gdb_assert_not_reached ("Invalid SP selection");
515}
516
517/* Return true if REGNUM is one of the alternative stack pointers. */
518
519static bool
520arm_is_alternative_sp_register (arm_gdbarch_tdep *tdep, int regnum)
521{
522 if ((regnum == tdep->m_profile_msp_regnum)
523 || (regnum == tdep->m_profile_msp_s_regnum)
524 || (regnum == tdep->m_profile_msp_ns_regnum)
525 || (regnum == tdep->m_profile_psp_regnum)
526 || (regnum == tdep->m_profile_psp_s_regnum)
527 || (regnum == tdep->m_profile_psp_ns_regnum))
528 return true;
529 else
530 return false;
531}
532
533/* Set the active stack pointer to SP_REGNUM. */
534
535static void
536arm_cache_switch_prev_sp (struct arm_prologue_cache *cache,
537 arm_gdbarch_tdep *tdep, int sp_regnum)
538{
539 gdb_assert (arm_is_alternative_sp_register (tdep, sp_regnum));
540
541 if (tdep->have_sec_ext)
542 {
543 gdb_assert (sp_regnum != tdep->m_profile_msp_regnum
544 && sp_regnum != tdep->m_profile_psp_regnum);
545
546 if (sp_regnum == tdep->m_profile_msp_s_regnum
547 || sp_regnum == tdep->m_profile_psp_s_regnum)
548 {
549 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
550 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
551 }
552 else if (sp_regnum == tdep->m_profile_msp_ns_regnum
553 || sp_regnum == tdep->m_profile_psp_ns_regnum)
554 {
555 cache->active_msp_regnum = tdep->m_profile_msp_ns_regnum;
556 cache->active_psp_regnum = tdep->m_profile_psp_ns_regnum;
557 }
558 }
559
560 cache->active_sp_regnum = sp_regnum;
561}
562
563namespace {
564
565/* Abstract class to read ARM instructions from memory. */
566
567class arm_instruction_reader
568{
569public:
 570 /* Read a 4-byte instruction from memory using BYTE_ORDER endianness. */
571 virtual uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const = 0;
572};
573
574/* Read instructions from target memory. */
575
576class target_arm_instruction_reader : public arm_instruction_reader
577{
578public:
579 uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override
580 {
581 return read_code_unsigned_integer (memaddr, 4, byte_order);
582 }
583};
584
585} /* namespace */
586
587static CORE_ADDR arm_analyze_prologue
588 (struct gdbarch *gdbarch, CORE_ADDR prologue_start, CORE_ADDR prologue_end,
589 struct arm_prologue_cache *cache, const arm_instruction_reader &insn_reader);
590
 591/* Architecture version for displaced stepping. This affects the behaviour of
592 certain instructions, and really should not be hard-wired. */
593
594#define DISPLACED_STEPPING_ARCH_VERSION 5
595
596/* See arm-tdep.h. */
597
598bool arm_apcs_32 = true;
599bool arm_unwind_secure_frames = true;
600
601/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
602
603int
604arm_psr_thumb_bit (struct gdbarch *gdbarch)
605{
606 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
607
608 if (tdep->is_m)
609 return XPSR_T;
610 else
611 return CPSR_T;
612}
613
614/* Determine if the processor is currently executing in Thumb mode. */
615
616int
617arm_is_thumb (struct regcache *regcache)
618{
619 ULONGEST cpsr;
620 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
621
622 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
623
624 return (cpsr & t_bit) != 0;
625}
626
627/* Determine if FRAME is executing in Thumb mode. FRAME must be an ARM
628 frame. */
629
630int
631arm_frame_is_thumb (frame_info_ptr frame)
632{
633 /* Check the architecture of FRAME. */
634 struct gdbarch *gdbarch = get_frame_arch (frame);
635 gdb_assert (gdbarch_bfd_arch_info (gdbarch)->arch == bfd_arch_arm);
636
637 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
638 directly (from a signal frame or dummy frame) or by interpreting
639 the saved LR (from a prologue or DWARF frame). So consult it and
640 trust the unwinders. */
641 CORE_ADDR cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
642
643 /* Find and extract the thumb bit. */
644 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
645 return (cpsr & t_bit) != 0;
646}
647
648/* Search for the mapping symbol covering MEMADDR. If one is found,
649 return its type. Otherwise, return 0. If START is non-NULL,
650 set *START to the location of the mapping symbol. */
651
652static char
653arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
654{
655 struct obj_section *sec;
656
657 /* If there are mapping symbols, consult them. */
658 sec = find_pc_section (memaddr);
659 if (sec != NULL)
660 {
661 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd.get ());
662 if (data != NULL)
663 {
664 unsigned int section_idx = sec->the_bfd_section->index;
665 arm_mapping_symbol_vec &map
666 = data->section_maps[section_idx];
667
668 /* Sort the vector on first use. */
669 if (!data->section_maps_sorted[section_idx])
670 {
671 std::sort (map.begin (), map.end ());
672 data->section_maps_sorted[section_idx] = true;
673 }
674
675 arm_mapping_symbol map_key = { memaddr - sec->addr (), 0 };
676 arm_mapping_symbol_vec::const_iterator it
677 = std::lower_bound (map.begin (), map.end (), map_key);
678
679 /* std::lower_bound finds the earliest ordered insertion
680 point. If the symbol at this position starts at this exact
681 address, we use that; otherwise, the preceding
682 mapping symbol covers this address. */
683 if (it < map.end ())
684 {
685 if (it->value == map_key.value)
686 {
687 if (start)
688 *start = it->value + sec->addr ();
689 return it->type;
690 }
691 }
692
693 if (it > map.begin ())
694 {
695 arm_mapping_symbol_vec::const_iterator prev_it
696 = it - 1;
697
698 if (start)
699 *start = prev_it->value + sec->addr ();
700 return prev_it->type;
701 }
702 }
703 }
704
705 return 0;
706}
707
708/* Determine if the program counter specified in MEMADDR is in a Thumb
709 function. This function should be called for addresses unrelated to
710 any executing frame; otherwise, prefer arm_frame_is_thumb. */
711
712int
713arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
714{
715 struct bound_minimal_symbol sym;
716 char type;
717 arm_displaced_step_copy_insn_closure *dsc = nullptr;
718 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
719
720 if (gdbarch_displaced_step_copy_insn_closure_by_addr_p (gdbarch))
721 dsc = ((arm_displaced_step_copy_insn_closure * )
722 gdbarch_displaced_step_copy_insn_closure_by_addr
723 (gdbarch, current_inferior (), memaddr));
724
 725 /* If checking the mode of a displaced instruction in the copy area, the
 726 mode should be determined by the instruction at the original address. */
727 if (dsc)
728 {
729 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
730 (unsigned long) dsc->insn_addr,
731 (unsigned long) memaddr);
732 memaddr = dsc->insn_addr;
733 }
734
735 /* If bit 0 of the address is set, assume this is a Thumb address. */
736 if (IS_THUMB_ADDR (memaddr))
737 return 1;
738
739 /* If the user wants to override the symbol table, let him. */
740 if (strcmp (arm_force_mode_string, "arm") == 0)
741 return 0;
742 if (strcmp (arm_force_mode_string, "thumb") == 0)
743 return 1;
744
745 /* ARM v6-M and v7-M are always in Thumb mode. */
746 if (tdep->is_m)
747 return 1;
748
749 /* If there are mapping symbols, consult them. */
750 type = arm_find_mapping_symbol (memaddr, NULL);
751 if (type)
752 return type == 't';
753
754 /* Thumb functions have a "special" bit set in minimal symbols. */
755 sym = lookup_minimal_symbol_by_pc (memaddr);
756 if (sym.minsym)
757 return (MSYMBOL_IS_SPECIAL (sym.minsym));
758
759 /* If the user wants to override the fallback mode, let them. */
760 if (strcmp (arm_fallback_mode_string, "arm") == 0)
761 return 0;
762 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
763 return 1;
764
765 /* If we couldn't find any symbol, but we're talking to a running
766 target, then trust the current value of $cpsr. This lets
767 "display/i $pc" always show the correct mode (though if there is
768 a symbol table we will not reach here, so it still may not be
769 displayed in the mode it will be executed). */
770 if (target_has_registers ())
771 return arm_frame_is_thumb (get_current_frame ());
772
773 /* Otherwise we're out of luck; we assume ARM. */
774 return 0;
775}
776
777static inline bool
778arm_m_addr_is_lockup (CORE_ADDR addr)
779{
780 switch (addr)
781 {
782 /* Values for lockup state.
783 For more details see "B1.5.15 Unrecoverable exception cases" in
784 both ARMv6-M and ARMv7-M Architecture Reference Manuals, or
785 see "B4.32 Lockup" in ARMv8-M Architecture Reference Manual. */
786 case 0xeffffffe:
787 case 0xfffffffe:
788 case 0xffffffff:
789 return true;
790
791 default:
792 /* Address is not lockup. */
793 return false;
794 }
795}
796
797/* Determine if the address specified equals any of these magic return
798 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
 799 architectures. Also include the lockup magic PC value.
800 Check also for FNC_RETURN if we have the v8-M security extension.
801
802 From ARMv6-M Reference Manual B1.5.8
803 Table B1-5 Exception return behavior
804
805 EXC_RETURN Return To Return Stack
806 0xFFFFFFF1 Handler mode Main
807 0xFFFFFFF9 Thread mode Main
808 0xFFFFFFFD Thread mode Process
809
810 From ARMv7-M Reference Manual B1.5.8
811 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
812
813 EXC_RETURN Return To Return Stack
814 0xFFFFFFF1 Handler mode Main
815 0xFFFFFFF9 Thread mode Main
816 0xFFFFFFFD Thread mode Process
817
818 Table B1-9 EXC_RETURN definition of exception return behavior, with
819 FP
820
821 EXC_RETURN Return To Return Stack Frame Type
822 0xFFFFFFE1 Handler mode Main Extended
823 0xFFFFFFE9 Thread mode Main Extended
824 0xFFFFFFED Thread mode Process Extended
825 0xFFFFFFF1 Handler mode Main Basic
826 0xFFFFFFF9 Thread mode Main Basic
827 0xFFFFFFFD Thread mode Process Basic
828
829 For more details see "B1.5.8 Exception return behavior"
830 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
831
832 From ARMv8-M Architecture Technical Reference, D1.2.95
833 FType, Mode and SPSEL bits are to be considered when the Security
834 Extension is not implemented.
835
836 EXC_RETURN Return To Return Stack Frame Type
837 0xFFFFFFA0 Handler mode Main Extended
838 0xFFFFFFA8 Thread mode Main Extended
839 0xFFFFFFAC Thread mode Process Extended
840 0xFFFFFFB0 Handler mode Main Standard
841 0xFFFFFFB8 Thread mode Main Standard
842 0xFFFFFFBC Thread mode Process Standard */
843
844static int
845arm_m_addr_is_magic (struct gdbarch *gdbarch, CORE_ADDR addr)
846{
847 if (arm_m_addr_is_lockup (addr))
848 return 1;
849
850 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
851 if (tdep->have_sec_ext)
852 {
853 switch ((addr & 0xff000000))
854 {
855 case 0xff000000: /* EXC_RETURN pattern. */
856 case 0xfe000000: /* FNC_RETURN pattern. */
857 return 1;
858 default:
859 return 0;
860 }
861 }
862 else
863 {
864 switch (addr)
865 {
866 /* Values from ARMv8-M Architecture Technical Reference. */
867 case 0xffffffa0:
868 case 0xffffffa8:
869 case 0xffffffac:
870 case 0xffffffb0:
871 case 0xffffffb8:
872 case 0xffffffbc:
873 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
874 the exception return behavior. */
875 case 0xffffffe1:
876 case 0xffffffe9:
877 case 0xffffffed:
878 case 0xfffffff1:
879 case 0xfffffff9:
880 case 0xfffffffd:
881 /* Address is magic. */
882 return 1;
883
884 default:
885 /* Address is not magic. */
886 return 0;
887 }
888 }
889}
890
891/* Remove useless bits from addresses in a running program. */
892static CORE_ADDR
893arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
894{
895 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
896
897 /* On M-profile devices, do not strip the low bit from EXC_RETURN
898 (the magic exception return address). */
899 if (tdep->is_m && arm_m_addr_is_magic (gdbarch, val))
900 return val;
901
902 if (arm_apcs_32)
903 return UNMAKE_THUMB_ADDR (val);
904 else
905 return (val & 0x03fffffc);
906}
907
908/* Return 1 if PC is the start of a compiler helper function which
909 can be safely ignored during prologue skipping. IS_THUMB is true
910 if the function is known to be a Thumb function due to the way it
911 is being called. */
912static int
913skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
914{
915 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
916 struct bound_minimal_symbol msym;
917
918 msym = lookup_minimal_symbol_by_pc (pc);
919 if (msym.minsym != NULL
920 && msym.value_address () == pc
921 && msym.minsym->linkage_name () != NULL)
922 {
923 const char *name = msym.minsym->linkage_name ();
924
925 /* The GNU linker's Thumb call stub to foo is named
926 __foo_from_thumb. */
927 if (strstr (name, "_from_thumb") != NULL)
928 name += 2;
929
930 /* On soft-float targets, __truncdfsf2 is called to convert promoted
931 arguments to their argument types in non-prototyped
932 functions. */
933 if (startswith (name, "__truncdfsf2"))
934 return 1;
935 if (startswith (name, "__aeabi_d2f"))
936 return 1;
937
938 /* Internal functions related to thread-local storage. */
939 if (startswith (name, "__tls_get_addr"))
940 return 1;
941 if (startswith (name, "__aeabi_read_tp"))
942 return 1;
943 }
944 else
945 {
946 /* If we run against a stripped glibc, we may be unable to identify
947 special functions by name. Check for one important case,
948 __aeabi_read_tp, by comparing the *code* against the default
949 implementation (this is hand-written ARM assembler in glibc). */
950
951 if (!is_thumb
952 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
953 == 0xe3e00a0f /* mov r0, #0xffff0fff */
954 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
955 == 0xe240f01f) /* sub pc, r0, #31 */
956 return 1;
957 }
958
959 return 0;
960}
961
 962/* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
 963 the first 16 bits of the instruction, and INSN2 is the second 16 bits of
 964 the instruction. */
965#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
966 ((bits ((insn1), 0, 3) << 12) \
967 | (bits ((insn1), 10, 10) << 11) \
968 | (bits ((insn2), 12, 14) << 8) \
969 | bits ((insn2), 0, 7))
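/* Illustrative example (assuming a correct manual decoding of the T3
   encoding): for "movw r0, #0x1234" the two halfwords are 0xf241 and
   0x2034, and EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x2034) yields 0x1234.  */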
970
 971/* Extract the immediate from a movw/movt instruction of encoding A. INSN is
 972 the 32-bit instruction. */
973#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
974 ((bits ((insn), 16, 19) << 12) \
975 | bits ((insn), 0, 11))
976
977/* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
978
979static unsigned int
980thumb_expand_immediate (unsigned int imm)
981{
982 unsigned int count = imm >> 7;
983
984 if (count < 8)
985 switch (count / 2)
986 {
987 case 0:
988 return imm & 0xff;
989 case 1:
990 return (imm & 0xff) | ((imm & 0xff) << 16);
991 case 2:
992 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
993 case 3:
994 return (imm & 0xff) | ((imm & 0xff) << 8)
995 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
996 }
997
998 return (0x80 | (imm & 0x7f)) << (32 - count);
999}
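/* Illustrative examples of the expansion above: an encoded value of
   0x155 (count == 2) expands to 0x00550055, while 0x4ff (count == 9)
   selects the rotated form and expands to 0x7f800000.  */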
1000
1001/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
1002 epilogue, 0 otherwise. */
1003
1004static int
1005thumb_instruction_restores_sp (unsigned short insn)
1006{
1007 return (insn == 0x46bd /* mov sp, r7 */
1008 || (insn & 0xff80) == 0xb000 /* add sp, imm */
1009 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
1010}
1011
1012/* Analyze a Thumb prologue, looking for a recognizable stack frame
1013 and frame pointer. Scan until we encounter a store that could
1014 clobber the stack frame unexpectedly, or an unknown instruction.
1015 Return the last address which is definitely safe to skip for an
1016 initial breakpoint. */
1017
1018static CORE_ADDR
1019thumb_analyze_prologue (struct gdbarch *gdbarch,
1020 CORE_ADDR start, CORE_ADDR limit,
1021 struct arm_prologue_cache *cache)
1022{
1023 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
1024 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1025 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1026 int i;
1027 pv_t regs[16];
1028 CORE_ADDR offset;
1029 CORE_ADDR unrecognized_pc = 0;
1030
1031 for (i = 0; i < 16; i++)
1032 regs[i] = pv_register (i, 0);
1033 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1034
1035 while (start < limit)
1036 {
1037 unsigned short insn;
1038 gdb::optional<bool> ra_signed_state;
1039
1040 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
1041
1042 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
1043 {
1044 int regno;
1045 int mask;
1046
1047 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1048 break;
1049
1050 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
1051 whether to save LR (R14). */
1052 mask = (insn & 0xff) | ((insn & 0x100) << 6);
1053
1054 /* Calculate offsets of saved R0-R7 and LR. */
1055 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
1056 if (mask & (1 << regno))
1057 {
1058 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
1059 -4);
1060 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1061 }
1062 }
1063 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
1064 {
1065 offset = (insn & 0x7f) << 2; /* get scaled offset */
1066 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
1067 -offset);
1068 }
1069 else if (thumb_instruction_restores_sp (insn))
1070 {
1071 /* Don't scan past the epilogue. */
1072 break;
1073 }
1074 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
1075 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
1076 (insn & 0xff) << 2);
1077 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
1078 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
1079 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
1080 bits (insn, 6, 8));
1081 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
1082 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1083 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
1084 bits (insn, 0, 7));
1085 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
1086 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
1087 && pv_is_constant (regs[bits (insn, 3, 5)]))
1088 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
1089 regs[bits (insn, 6, 8)]);
1090 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
1091 && pv_is_constant (regs[bits (insn, 3, 6)]))
1092 {
1093 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
1094 int rm = bits (insn, 3, 6);
1095 regs[rd] = pv_add (regs[rd], regs[rm]);
1096 }
1097 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
1098 {
1099 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
1100 int src_reg = (insn & 0x78) >> 3;
1101 regs[dst_reg] = regs[src_reg];
1102 }
1103 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
1104 {
1105 /* Handle stores to the stack. Normally pushes are used,
1106 but with GCC -mtpcs-frame, there may be other stores
1107 in the prologue to create the frame. */
1108 int regno = (insn >> 8) & 0x7;
1109 pv_t addr;
1110
1111 offset = (insn & 0xff) << 2;
1112 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
1113
1114 if (stack.store_would_trash (addr))
1115 break;
1116
1117 stack.store (addr, 4, regs[regno]);
1118 }
1119 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
1120 {
1121 int rd = bits (insn, 0, 2);
1122 int rn = bits (insn, 3, 5);
1123 pv_t addr;
1124
1125 offset = bits (insn, 6, 10) << 2;
1126 addr = pv_add_constant (regs[rn], offset);
1127
1128 if (stack.store_would_trash (addr))
1129 break;
1130
1131 stack.store (addr, 4, regs[rd]);
1132 }
1133 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
1134 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
1135 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
1136 /* Ignore stores of argument registers to the stack. */
1137 ;
1138 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
1139 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1140 /* Ignore block loads from the stack, potentially copying
1141 parameters from memory. */
1142 ;
1143 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
1144 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
1145 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
1146 /* Similarly ignore single loads from the stack. */
1147 ;
1148 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
1149 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
1150 /* Skip register copies, i.e. saves to another register
1151 instead of the stack. */
1152 ;
1153 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
1154 /* Recognize constant loads; even with small stacks these are necessary
1155 on Thumb. */
1156 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
1157 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
1158 {
1159 /* Constant pool loads, for the same reason. */
1160 unsigned int constant;
1161 CORE_ADDR loc;
1162
1163 loc = start + 4 + bits (insn, 0, 7) * 4;
1164 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1165 regs[bits (insn, 8, 10)] = pv_constant (constant);
1166 }
1167 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
1168 {
1169 unsigned short inst2;
1170
1171 inst2 = read_code_unsigned_integer (start + 2, 2,
1172 byte_order_for_code);
1173 uint32_t whole_insn = (insn << 16) | inst2;
1174
1175 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
1176 {
1177 /* BL, BLX. Allow some special function calls when
1178 skipping the prologue; GCC generates these before
1179 storing arguments to the stack. */
1180 CORE_ADDR nextpc;
1181 int j1, j2, imm1, imm2;
1182
1183 imm1 = sbits (insn, 0, 10);
1184 imm2 = bits (inst2, 0, 10);
1185 j1 = bit (inst2, 13);
1186 j2 = bit (inst2, 11);
1187
1188 offset = ((imm1 << 12) + (imm2 << 1));
1189 offset ^= ((!j2) << 22) | ((!j1) << 23);
1190
1191 nextpc = start + 4 + offset;
1192 /* For BLX make sure to clear the low bits. */
1193 if (bit (inst2, 12) == 0)
1194 nextpc = nextpc & 0xfffffffc;
1195
1196 if (!skip_prologue_function (gdbarch, nextpc,
1197 bit (inst2, 12) != 0))
1198 break;
1199 }
1200
1201 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
1202 { registers } */
1203 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1204 {
1205 pv_t addr = regs[bits (insn, 0, 3)];
1206 int regno;
1207
1208 if (stack.store_would_trash (addr))
1209 break;
1210
1211 /* Calculate offsets of saved registers. */
1212 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
1213 if (inst2 & (1 << regno))
1214 {
1215 addr = pv_add_constant (addr, -4);
1216 stack.store (addr, 4, regs[regno]);
1217 }
1218
1219 if (insn & 0x0020)
1220 regs[bits (insn, 0, 3)] = addr;
1221 }
1222
1223 /* vstmdb Rn{!}, { D-registers } (aka vpush). */
1224 else if ((insn & 0xff20) == 0xed20
1225 && (inst2 & 0x0f00) == 0x0b00
1226 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1227 {
1228 /* Address SP points to. */
1229 pv_t addr = regs[bits (insn, 0, 3)];
1230
1231 /* Number of registers saved. */
1232 unsigned int number = bits (inst2, 0, 7) >> 1;
1233
1234 /* First register to save. */
1235 int vd = bits (inst2, 12, 15) | (bits (insn, 6, 6) << 4);
1236
1237 if (stack.store_would_trash (addr))
1238 break;
1239
1240 /* Calculate offsets of saved registers. */
1241 for (; number > 0; number--)
1242 {
1243 addr = pv_add_constant (addr, -8);
1244 stack.store (addr, 8, pv_register (ARM_D0_REGNUM
1245 + vd + number, 0));
1246 }
1247
1248 /* Writeback SP to account for the saved registers. */
1249 regs[bits (insn, 0, 3)] = addr;
1250 }
1251
1252 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
1253 [Rn, #+/-imm]{!} */
1254 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1255 {
1256 int regno1 = bits (inst2, 12, 15);
1257 int regno2 = bits (inst2, 8, 11);
1258 pv_t addr = regs[bits (insn, 0, 3)];
1259
1260 offset = inst2 & 0xff;
1261 if (insn & 0x0080)
1262 addr = pv_add_constant (addr, offset);
1263 else
1264 addr = pv_add_constant (addr, -offset);
1265
1266 if (stack.store_would_trash (addr))
1267 break;
1268
1269 stack.store (addr, 4, regs[regno1]);
1270 stack.store (pv_add_constant (addr, 4),
1271 4, regs[regno2]);
1272
1273 if (insn & 0x0020)
1274 regs[bits (insn, 0, 3)] = addr;
1275 }
1276
1277 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
1278 && (inst2 & 0x0c00) == 0x0c00
1279 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1280 {
1281 int regno = bits (inst2, 12, 15);
1282 pv_t addr = regs[bits (insn, 0, 3)];
1283
1284 offset = inst2 & 0xff;
1285 if (inst2 & 0x0200)
1286 addr = pv_add_constant (addr, offset);
1287 else
1288 addr = pv_add_constant (addr, -offset);
1289
1290 if (stack.store_would_trash (addr))
1291 break;
1292
1293 stack.store (addr, 4, regs[regno]);
1294
1295 if (inst2 & 0x0100)
1296 regs[bits (insn, 0, 3)] = addr;
1297 }
1298
1299 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
1300 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1301 {
1302 int regno = bits (inst2, 12, 15);
1303 pv_t addr;
1304
1305 offset = inst2 & 0xfff;
1306 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
1307
1308 if (stack.store_would_trash (addr))
1309 break;
1310
1311 stack.store (addr, 4, regs[regno]);
1312 }
1313
1314 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
1315 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1316 /* Ignore stores of argument registers to the stack. */
1317 ;
1318
1319 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
1320 && (inst2 & 0x0d00) == 0x0c00
1321 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1322 /* Ignore stores of argument registers to the stack. */
1323 ;
1324
1325 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
1326 { registers } */
1327 && (inst2 & 0x8000) == 0x0000
1328 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1329 /* Ignore block loads from the stack, potentially copying
1330 parameters from memory. */
1331 ;
1332
1333 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
1334 [Rn, #+/-imm] */
1335 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1336 /* Similarly ignore dual loads from the stack. */
1337 ;
1338
1339 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
1340 && (inst2 & 0x0d00) == 0x0c00
1341 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1342 /* Similarly ignore single loads from the stack. */
1343 ;
1344
1345 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1346 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1347 /* Similarly ignore single loads from the stack. */
1348 ;
1349
1350 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1351 && (inst2 & 0x8000) == 0x0000)
1352 {
1353 unsigned int imm = ((bits (insn, 10, 10) << 11)
1354 | (bits (inst2, 12, 14) << 8)
1355 | bits (inst2, 0, 7));
1356
1357 regs[bits (inst2, 8, 11)]
1358 = pv_add_constant (regs[bits (insn, 0, 3)],
1359 thumb_expand_immediate (imm));
1360 }
1361
1362 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1363 && (inst2 & 0x8000) == 0x0000)
1364 {
1365 unsigned int imm = ((bits (insn, 10, 10) << 11)
1366 | (bits (inst2, 12, 14) << 8)
1367 | bits (inst2, 0, 7));
1368
1369 regs[bits (inst2, 8, 11)]
1370 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1371 }
1372
1373 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1374 && (inst2 & 0x8000) == 0x0000)
1375 {
1376 unsigned int imm = ((bits (insn, 10, 10) << 11)
1377 | (bits (inst2, 12, 14) << 8)
1378 | bits (inst2, 0, 7));
1379
1380 regs[bits (inst2, 8, 11)]
1381 = pv_add_constant (regs[bits (insn, 0, 3)],
1382 - (CORE_ADDR) thumb_expand_immediate (imm));
1383 }
1384
1385 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1386 && (inst2 & 0x8000) == 0x0000)
1387 {
1388 unsigned int imm = ((bits (insn, 10, 10) << 11)
1389 | (bits (inst2, 12, 14) << 8)
1390 | bits (inst2, 0, 7));
1391
1392 regs[bits (inst2, 8, 11)]
1393 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1394 }
1395
1396 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1397 {
1398 unsigned int imm = ((bits (insn, 10, 10) << 11)
1399 | (bits (inst2, 12, 14) << 8)
1400 | bits (inst2, 0, 7));
1401
1402 regs[bits (inst2, 8, 11)]
1403 = pv_constant (thumb_expand_immediate (imm));
1404 }
1405
1406 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1407 {
1408 unsigned int imm
1409 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1410
1411 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1412 }
1413
1414 else if (insn == 0xea5f /* mov.w Rd,Rm */
1415 && (inst2 & 0xf0f0) == 0)
1416 {
1417 int dst_reg = (inst2 & 0x0f00) >> 8;
1418 int src_reg = inst2 & 0xf;
1419 regs[dst_reg] = regs[src_reg];
1420 }
1421
1422 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1423 {
1424 /* Constant pool loads. */
1425 unsigned int constant;
1426 CORE_ADDR loc;
1427
1428 offset = bits (inst2, 0, 11);
1429 if (insn & 0x0080)
1430 loc = start + 4 + offset;
1431 else
1432 loc = start + 4 - offset;
1433
1434 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1435 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1436 }
1437
1438 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1439 {
1440 /* Constant pool loads. */
1441 unsigned int constant;
1442 CORE_ADDR loc;
1443
1444 offset = bits (inst2, 0, 7) << 2;
1445 if (insn & 0x0080)
1446 loc = start + 4 + offset;
1447 else
1448 loc = start + 4 - offset;
1449
1450 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1451 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1452
1453 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1454 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1455 }
1456 /* Start of ARMv8.1-m PACBTI extension instructions. */
1457 else if (IS_PAC (whole_insn))
1458 {
1459 /* LR and SP are input registers. PAC is in R12. LR is
1460 signed from this point onwards. NOP space. */
1461 ra_signed_state = true;
1462 }
1463 else if (IS_PACBTI (whole_insn))
1464 {
1465 /* LR and SP are input registers. PAC is in R12 and PC is a
1466 valid BTI landing pad. LR is signed from this point onwards.
1467 NOP space. */
1468 ra_signed_state = true;
1469 }
1470 else if (IS_BTI (whole_insn))
1471 {
1472 /* Valid BTI landing pad. NOP space. */
1473 }
1474 else if (IS_PACG (whole_insn))
1475 {
1476 /* Sign Rn using Rm and store the PAC in Rd. Rd is signed from
1477 this point onwards. */
1478 ra_signed_state = true;
1479 }
1480 else if (IS_AUT (whole_insn) || IS_AUTG (whole_insn))
1481 {
1482 /* These instructions appear close to the epilogue, when signed
1483 pointers are getting authenticated. */
1484 ra_signed_state = false;
1485 }
1486 /* End of ARMv8.1-m PACBTI extension instructions */
1487 else if (thumb2_instruction_changes_pc (insn, inst2))
1488 {
1489 /* Don't scan past anything that might change control flow. */
1490 break;
1491 }
1492 else
1493 {
1494 /* The optimizer might shove anything into the prologue,
1495 so we just skip what we don't recognize. */
1496 unrecognized_pc = start;
1497 }
1498
1499 /* Make sure we are dealing with a target that supports ARMv8.1-m
1500 PACBTI. */
1501 if (cache != nullptr && tdep->have_pacbti
1502 && ra_signed_state.has_value ())
1503 {
1504 arm_debug_printf ("Found pacbti instruction at %s",
1505 paddress (gdbarch, start));
1506 arm_debug_printf ("RA is %s",
 1507 *ra_signed_state ? "signed" : "not signed");
1508 cache->ra_signed_state = ra_signed_state;
1509 }
1510
1511 start += 2;
1512 }
1513 else if (thumb_instruction_changes_pc (insn))
1514 {
1515 /* Don't scan past anything that might change control flow. */
1516 break;
1517 }
1518 else
1519 {
1520 /* The optimizer might shove anything into the prologue,
1521 so we just skip what we don't recognize. */
1522 unrecognized_pc = start;
1523 }
1524
1525 start += 2;
1526 }
1527
1528 arm_debug_printf ("Prologue scan stopped at %s",
1529 paddress (gdbarch, start));
1530
1531 if (unrecognized_pc == 0)
1532 unrecognized_pc = start;
1533
1534 if (cache == NULL)
1535 return unrecognized_pc;
1536
1537 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1538 {
1539 /* Frame pointer is fp. Frame size is constant. */
1540 cache->framereg = ARM_FP_REGNUM;
1541 cache->framesize = -regs[ARM_FP_REGNUM].k;
1542 }
1543 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1544 {
1545 /* Frame pointer is r7. Frame size is constant. */
1546 cache->framereg = THUMB_FP_REGNUM;
1547 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1548 }
1549 else
1550 {
1551 /* Try the stack pointer... this is a bit desperate. */
1552 cache->framereg = ARM_SP_REGNUM;
1553 cache->framesize = -regs[ARM_SP_REGNUM].k;
1554 }
1555
1556 for (i = 0; i < gdbarch_num_regs (gdbarch); i++)
1557 if (stack.find_reg (gdbarch, i, &offset))
1558 {
1559 cache->saved_regs[i].set_addr (offset);
1560 if (i == ARM_SP_REGNUM)
1561 arm_cache_set_active_sp_value(cache, tdep, offset);
1562 }
1563
1564 return unrecognized_pc;
1565}
1566
1567
 1568/* Try to analyze the instructions starting from PC, which load symbol
 1569 __stack_chk_guard. Return the address of the instruction after loading this
 1570 symbol, set the destination register number in *DESTREG, and set the size of
 1571 the instructions used to load the symbol in *OFFSET. Return 0 if the
 1572 instructions are not recognized. */
1573
1574static CORE_ADDR
1575arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1576 unsigned int *destreg, int *offset)
1577{
1578 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1579 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1580 unsigned int low, high, address;
1581
1582 address = 0;
1583 if (is_thumb)
1584 {
1585 unsigned short insn1
1586 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1587
1588 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1589 {
1590 *destreg = bits (insn1, 8, 10);
1591 *offset = 2;
1592 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1593 address = read_memory_unsigned_integer (address, 4,
1594 byte_order_for_code);
1595 }
1596 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1597 {
1598 unsigned short insn2
1599 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1600
1601 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1602
1603 insn1
1604 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1605 insn2
1606 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1607
1608 /* movt Rd, #const */
1609 if ((insn1 & 0xfbc0) == 0xf2c0)
1610 {
1611 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1612 *destreg = bits (insn2, 8, 11);
1613 *offset = 8;
1614 address = (high << 16 | low);
1615 }
1616 }
1617 }
1618 else
1619 {
1620 unsigned int insn
1621 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1622
1623 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1624 {
1625 address = bits (insn, 0, 11) + pc + 8;
1626 address = read_memory_unsigned_integer (address, 4,
1627 byte_order_for_code);
1628
1629 *destreg = bits (insn, 12, 15);
1630 *offset = 4;
1631 }
1632 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1633 {
1634 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1635
1636 insn
1637 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1638
1639 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1640 {
1641 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1642 *destreg = bits (insn, 12, 15);
1643 *offset = 8;
1644 address = (high << 16 | low);
1645 }
1646 }
1647 }
1648
1649 return address;
1650}
1651
 1652/* Try to skip a sequence of instructions used for the stack protector. If PC
 1653 points to the first instruction of this sequence, return the address of the
 1654 first instruction after this sequence; otherwise, return the original PC.
1655
 1656 On ARM, this sequence of instructions is composed of three main steps:
1657 Step 1: load symbol __stack_chk_guard,
1658 Step 2: load from address of __stack_chk_guard,
1659 Step 3: store it to somewhere else.
1660
 1661 The instructions in step 2 and step 3 are usually the same across ARM
 1662 architectures. Step 2 is the single instruction 'ldr Rx, [Rn, #0]', and
 1663 step 3 is the single instruction 'str Rx, [r7, #immd]'. However, the
 1664 instructions in step 1 vary between ARM architectures. On ARMv7,
1665 they are,
1666
1667 movw Rn, #:lower16:__stack_chk_guard
1668 movt Rn, #:upper16:__stack_chk_guard
1669
1670 On ARMv5t, it is,
1671
1672 ldr Rn, .Label
1673 ....
 1674 .Label:
1675 .word __stack_chk_guard
1676
 1677 Since ldr/str are very common instructions, we can't use them alone as the
 1678 'fingerprint' or 'signature' of the stack protector sequence. Here we use
 1679 the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if
 1680 not stripped, as the 'fingerprint' of a stack protector code sequence. */
1681
1682static CORE_ADDR
1683arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1684{
1685 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1686 unsigned int basereg;
1687 struct bound_minimal_symbol stack_chk_guard;
1688 int offset;
1689 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1690 CORE_ADDR addr;
1691
1692 /* Try to parse the instructions in Step 1. */
1693 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1694 &basereg, &offset);
1695 if (!addr)
1696 return pc;
1697
1698 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1699 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
 1700 Otherwise, this sequence cannot be for the stack protector. */
1701 if (stack_chk_guard.minsym == NULL
1702 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1703 return pc;
1704
1705 if (is_thumb)
1706 {
1707 unsigned int destreg;
1708 unsigned short insn
1709 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1710
1711 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1712 if ((insn & 0xf800) != 0x6800)
1713 return pc;
1714 if (bits (insn, 3, 5) != basereg)
1715 return pc;
1716 destreg = bits (insn, 0, 2);
1717
1718 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1719 byte_order_for_code);
1720 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1721 if ((insn & 0xf800) != 0x6000)
1722 return pc;
1723 if (destreg != bits (insn, 0, 2))
1724 return pc;
1725 }
1726 else
1727 {
1728 unsigned int destreg;
1729 unsigned int insn
1730 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1731
1732 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1733 if ((insn & 0x0e500000) != 0x04100000)
1734 return pc;
1735 if (bits (insn, 16, 19) != basereg)
1736 return pc;
1737 destreg = bits (insn, 12, 15);
1738 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1739 insn = read_code_unsigned_integer (pc + offset + 4,
1740 4, byte_order_for_code);
1741 if ((insn & 0x0e500000) != 0x04000000)
1742 return pc;
1743 if (bits (insn, 12, 15) != destreg)
1744 return pc;
1745 }
 1746 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
 1747 and 8 bytes on ARM. */
1748 if (is_thumb)
1749 return pc + offset + 4;
1750 else
1751 return pc + offset + 8;
1752}
1753
1754/* Advance the PC across any function entry prologue instructions to
1755 reach some "real" code.
1756
1757 The APCS (ARM Procedure Call Standard) defines the following
1758 prologue:
1759
1760 mov ip, sp
1761 [stmfd sp!, {a1,a2,a3,a4}]
1762 stmfd sp!, {...,fp,ip,lr,pc}
1763 [stfe f7, [sp, #-12]!]
1764 [stfe f6, [sp, #-12]!]
1765 [stfe f5, [sp, #-12]!]
1766 [stfe f4, [sp, #-12]!]
1767 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1768
1769static CORE_ADDR
1770arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1771{
1772 CORE_ADDR func_addr, limit_pc;
1773
1774 /* See if we can determine the end of the prologue via the symbol table.
1775 If so, then return either PC, or the PC after the prologue, whichever
1776 is greater. */
1777 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1778 {
1779 CORE_ADDR post_prologue_pc
1780 = skip_prologue_using_sal (gdbarch, func_addr);
1781 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1782
1783 if (post_prologue_pc)
1784 post_prologue_pc
1785 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1786
1787
1788 /* GCC always emits a line note before the prologue and another
1789 one after, even if the two are at the same address or on the
1790 same line. Take advantage of this so that we do not need to
1791 know every instruction that might appear in the prologue. We
1792 will have producer information for most binaries; if it is
 1793 missing (e.g. for -gstabs), assume the GNU tools. */
1794 if (post_prologue_pc
1795 && (cust == NULL
1796 || cust->producer () == NULL
1797 || startswith (cust->producer (), "GNU ")
1798 || producer_is_llvm (cust->producer ())))
1799 return post_prologue_pc;
1800
1801 if (post_prologue_pc != 0)
1802 {
1803 CORE_ADDR analyzed_limit;
1804
1805 /* For non-GCC compilers, make sure the entire line is an
1806 acceptable prologue; GDB will round this function's
1807 return value up to the end of the following line so we
1808 can not skip just part of a line (and we do not want to).
1809
1810 RealView does not treat the prologue specially, but does
1811 associate prologue code with the opening brace; so this
1812 lets us skip the first line if we think it is the opening
1813 brace. */
1814 if (arm_pc_is_thumb (gdbarch, func_addr))
1815 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1816 post_prologue_pc, NULL);
1817 else
1818 analyzed_limit
1819 = arm_analyze_prologue (gdbarch, func_addr, post_prologue_pc,
1820 NULL, target_arm_instruction_reader ());
1821
1822 if (analyzed_limit != post_prologue_pc)
1823 return func_addr;
1824
1825 return post_prologue_pc;
1826 }
1827 }
1828
1829 /* Can't determine prologue from the symbol table, need to examine
1830 instructions. */
1831
1832 /* Find an upper limit on the function prologue using the debug
1833 information. If the debug information could not be used to provide
1834 that bound, then use an arbitrary large number as the upper bound. */
1835 /* Like arm_scan_prologue, stop no later than pc + 64. */
1836 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1837 if (limit_pc == 0)
1838 limit_pc = pc + 64; /* Magic. */
1839
1841 /* Check if this is Thumb code. */
1842 if (arm_pc_is_thumb (gdbarch, pc))
1843 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1844 else
1845 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL,
1846 target_arm_instruction_reader ());
1847}
1848
1849/* *INDENT-OFF* */
1850/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1851 This function decodes a Thumb function prologue to determine:
1852 1) the size of the stack frame
1853 2) which registers are saved on it
1854 3) the offsets of saved regs
1855 4) the offset from the stack pointer to the frame pointer
1856
1857 A typical Thumb function prologue would create this stack frame
1858 (offsets relative to FP)
1859 old SP -> 24 stack parameters
1860 20 LR
1861 16 R7
1862 R7 -> 0 local variables (16 bytes)
1863 SP -> -12 additional stack space (12 bytes)
1864 The frame size would thus be 36 bytes, and the frame offset would be
1865 12 bytes. The frame register is R7.
1866
1867 The comments for thumb_skip_prolog() describe the algorithm we use
1868 to detect the end of the prolog. */
1869/* *INDENT-ON* */
1870
1871static void
1872thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1873 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1874{
1875 CORE_ADDR prologue_start;
1876 CORE_ADDR prologue_end;
1877
1878 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1879 &prologue_end))
1880 {
1881 /* See comment in arm_scan_prologue for an explanation of
1882 this heuristic. */
1883 if (prologue_end > prologue_start + 64)
1884 {
1885 prologue_end = prologue_start + 64;
1886 }
1887 }
1888 else
1889 /* We're in the boondocks: we have no idea where the start of the
1890 function is. */
1891 return;
1892
1893 prologue_end = std::min (prologue_end, prev_pc);
1894
1895 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1896}
1897
1898/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1899 otherwise. */
1900
1901static int
1902arm_instruction_restores_sp (unsigned int insn)
1903{
1904 if (bits (insn, 28, 31) != INST_NV)
1905 {
1906 if ((insn & 0x0df0f000) == 0x0080d000
1907 /* ADD SP (register or immediate). */
1908 || (insn & 0x0df0f000) == 0x0040d000
1909 /* SUB SP (register or immediate). */
1910 || (insn & 0x0ffffff0) == 0x01a0d000
1911 /* MOV SP. */
1912 || (insn & 0x0fff0000) == 0x08bd0000
1913 /* POP (LDMIA). */
1914 || (insn & 0x0fff0000) == 0x049d0000)
1915 /* POP of a single register. */
1916 return 1;
1917 }
1918
1919 return 0;
1920}
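/* A minimal standalone sketch (illustrative only, kept under "#if 0"; the
   function name and the sample encoding are assumptions) showing one
   instruction matching the "ADD SP" mask tested above: "add sp, sp, #8"
   assembles to 0xe28dd008.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
restores_sp_example (void)
{
  uint32_t insn = 0xe28dd008;   /* add sp, sp, #8 */

  /* Condition field is 0xe (AL), not the never (NV) encoding.  */
  assert (((insn >> 28) & 0xf) != 0xf);

  /* Matches the "ADD SP (register or immediate)" pattern above.  */
  assert ((insn & 0x0df0f000) == 0x0080d000);
}
#endif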
1921
1922/* Implement immediate value decoding, as described in section A5.2.4
1923 (Modified immediate constants in ARM instructions) of the ARM Architecture
1924 Reference Manual (ARMv7-A and ARMv7-R edition). */
1925
1926static uint32_t
1927arm_expand_immediate (uint32_t imm)
1928{
1929 /* Immediate values are 12 bits long. */
1930 gdb_assert ((imm & 0xfffff000) == 0);
1931
1932 uint32_t unrotated_value = imm & 0xff;
1933 uint32_t rotate_amount = (imm & 0xf00) >> 7;
1934
1935 if (rotate_amount == 0)
1936 return unrotated_value;
1937
1938 return ((unrotated_value >> rotate_amount)
1939 | (unrotated_value << (32 - rotate_amount)));
1940}
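/* A worked example (illustrative only, kept under "#if 0"; the function name
   and value are assumptions) of the modified-immediate expansion above: the
   encoded value 0x4ff has rotate field 4 and byte 0xff, so the rotation
   amount is 4 * 2 = 8 and the expanded constant is 0xff ROR 8 = 0xff000000.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
expand_immediate_example (void)
{
  uint32_t imm = 0x4ff;                     /* rotate field 4, value 0xff.  */
  uint32_t unrotated = imm & 0xff;          /* 0xff */
  uint32_t rotation = (imm & 0xf00) >> 7;   /* field * 2 = 8 */
  uint32_t expanded = (unrotated >> rotation)
                      | (unrotated << (32 - rotation));
  assert (expanded == 0xff000000);
}
#endif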
1941
1942/* Analyze an ARM mode prologue starting at PROLOGUE_START and
1943 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1944 fill it in. Return the first address not recognized as a prologue
1945 instruction.
1946
1947 We recognize all the instructions typically found in ARM prologues,
1948 plus harmless instructions which can be skipped (either for analysis
1949 purposes, or a more restrictive set that can be skipped when finding
1950 the end of the prologue). */
1951
1952static CORE_ADDR
1953arm_analyze_prologue (struct gdbarch *gdbarch,
1954 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1955 struct arm_prologue_cache *cache,
1956 const arm_instruction_reader &insn_reader)
1957{
1958 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1959 int regno;
1960 CORE_ADDR offset, current_pc;
1961 pv_t regs[ARM_FPS_REGNUM];
1962 CORE_ADDR unrecognized_pc = 0;
1963 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
1964
1965 /* Search the prologue looking for instructions that set up the
1966 frame pointer, adjust the stack pointer, and save registers.
1967
1968 Be careful, however, and if it doesn't look like a prologue,
1969 don't try to scan it. If, for instance, a frameless function
1970 begins with stmfd sp!, then we will tell ourselves there is
1971 a frame, which will confuse stack traceback, as well as "finish"
1972 and other operations that rely on a knowledge of the stack
1973 traceback. */
1974
1975 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1976 regs[regno] = pv_register (regno, 0);
1977 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1978
1979 for (current_pc = prologue_start;
1980 current_pc < prologue_end;
1981 current_pc += 4)
1982 {
1983 uint32_t insn = insn_reader.read (current_pc, byte_order_for_code);
1984
1985 if (insn == 0xe1a0c00d) /* mov ip, sp */
1986 {
1987 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1988 continue;
1989 }
1990 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1991 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1992 {
1993 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1994 int rd = bits (insn, 12, 15);
1995 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1996 continue;
1997 }
1998 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1999 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2000 {
2001 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2002 int rd = bits (insn, 12, 15);
2003 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
2004 continue;
2005 }
2006 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
2007 [sp, #-4]! */
2008 {
2009 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2010 break;
2011 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
2012 stack.store (regs[ARM_SP_REGNUM], 4,
2013 regs[bits (insn, 12, 15)]);
2014 continue;
2015 }
2016 else if ((insn & 0xffff0000) == 0xe92d0000)
2017 /* stmfd sp!, {..., fp, ip, lr, pc}
2018 or
2019 stmfd sp!, {a1, a2, a3, a4} */
2020 {
2021 int mask = insn & 0xffff;
2022
2023 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2024 break;
2025
2026 /* Calculate offsets of saved registers. */
2027 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
2028 if (mask & (1 << regno))
2029 {
2030 regs[ARM_SP_REGNUM]
2031 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
2032 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
2033 }
2034 }
2035 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
2036 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
2037 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
2038 {
2039 /* No need to add this to saved_regs -- it's just an arg reg. */
2040 continue;
2041 }
2042 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
2043 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
2044 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
2045 {
2046 /* No need to add this to saved_regs -- it's just an arg reg. */
2047 continue;
2048 }
2049 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
2050 { registers } */
2051 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2052 {
2053 /* No need to add this to saved_regs -- it's just arg regs. */
2054 continue;
2055 }
2056 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #n */
2057 {
2058 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2059 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
2060 }
2061 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #n */
2062 {
2063 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2064 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
2065 }
2066 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
2067 [sp, -#c]! */
2068 && tdep->have_fpa_registers)
2069 {
2070 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2071 break;
2072
2073 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
2074 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
2075 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
2076 }
2077 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
2078 [sp!] */
2079 && tdep->have_fpa_registers)
2080 {
2081 int n_saved_fp_regs;
2082 unsigned int fp_start_reg, fp_bound_reg;
2083
2084 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2085 break;
2086
2087 if ((insn & 0x800) == 0x800) /* N0 is set */
2088 {
2089 if ((insn & 0x40000) == 0x40000) /* N1 is set */
2090 n_saved_fp_regs = 3;
2091 else
2092 n_saved_fp_regs = 1;
2093 }
2094 else
2095 {
2096 if ((insn & 0x40000) == 0x40000) /* N1 is set */
2097 n_saved_fp_regs = 2;
2098 else
2099 n_saved_fp_regs = 4;
2100 }
2101
2102 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
2103 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
2104 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
2105 {
2106 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
2107 stack.store (regs[ARM_SP_REGNUM], 12,
2108 regs[fp_start_reg]);
2109 }
2110 }
2111 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
2112 {
2113 /* Allow some special function calls when skipping the
2114 prologue; GCC generates these before storing arguments to
2115 the stack. */
2116 CORE_ADDR dest = BranchDest (current_pc, insn);
2117
2118 if (skip_prologue_function (gdbarch, dest, 0))
2119 continue;
2120 else
2121 break;
2122 }
2123 else if ((insn & 0xf0000000) != 0xe0000000)
2124 break; /* Condition not true, exit early. */
2125 else if (arm_instruction_changes_pc (insn))
2126 /* Don't scan past anything that might change control flow. */
2127 break;
2128 else if (arm_instruction_restores_sp (insn))
2129 {
2130 /* Don't scan past the epilogue. */
2131 break;
2132 }
2133 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
2134 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2135 /* Ignore block loads from the stack, potentially copying
2136 parameters from memory. */
2137 continue;
2138 else if ((insn & 0xfc500000) == 0xe4100000
2139 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2140 /* Similarly ignore single loads from the stack. */
2141 continue;
2142 else if ((insn & 0xffff0ff0) == 0xe1a00000)
2143 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
2144 register instead of the stack. */
2145 continue;
2146 else
2147 {
2148 /* The optimizer might shove anything into the prologue. If we
2149 are building up the cache (cache != NULL) from scanning the
2150 prologue, just skip what we don't recognize and scan further to
2151 make the cache as complete as possible. However, if we are only
2152 skipping the prologue, stop immediately at the first unrecognized
2153 instruction. */
2154 unrecognized_pc = current_pc;
2155 if (cache != NULL)
2156 continue;
2157 else
2158 break;
2159 }
2160 }
2161
2162 if (unrecognized_pc == 0)
2163 unrecognized_pc = current_pc;
2164
2165 if (cache)
2166 {
2167 int framereg, framesize;
2168
2169 /* The frame size is just the distance from the frame register
2170 to the original stack pointer. */
2171 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
2172 {
2173 /* Frame pointer is fp. */
2174 framereg = ARM_FP_REGNUM;
2175 framesize = -regs[ARM_FP_REGNUM].k;
2176 }
2177 else
2178 {
2179 /* Try the stack pointer... this is a bit desperate. */
2180 framereg = ARM_SP_REGNUM;
2181 framesize = -regs[ARM_SP_REGNUM].k;
2182 }
2183
2184 cache->framereg = framereg;
2185 cache->framesize = framesize;
2186
2187 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
2188 if (stack.find_reg (gdbarch, regno, &offset))
2189 {
2190 cache->saved_regs[regno].set_addr (offset);
2191 if (regno == ARM_SP_REGNUM)
2192 arm_cache_set_active_sp_value (cache, tdep, offset);
2193 }
2194 }
2195
2196 arm_debug_printf ("Prologue scan stopped at %s",
2197 paddress (gdbarch, unrecognized_pc));
2198
2199 return unrecognized_pc;
2200}
2201
2202static void
2203arm_scan_prologue (frame_info_ptr this_frame,
2204 struct arm_prologue_cache *cache)
2205{
2206 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2207 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2208 CORE_ADDR prologue_start, prologue_end;
2209 CORE_ADDR prev_pc = get_frame_pc (this_frame);
2210 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
2211 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
2212
2213 /* Assume there is no frame until proven otherwise. */
2214 cache->framereg = ARM_SP_REGNUM;
2215 cache->framesize = 0;
2216
2217 /* Check for Thumb prologue. */
2218 if (arm_frame_is_thumb (this_frame))
2219 {
2220 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
2221 return;
2222 }
2223
2224 /* Find the function prologue. If we can't find the function in
2225 the symbol table, peek in the stack frame to find the PC. */
2226 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
2227 &prologue_end))
2228 {
2229 /* One way to find the end of the prologue (which works well
2230 for unoptimized code) is to do the following:
2231
2232 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
2233
2234 if (sal.line == 0)
2235 prologue_end = prev_pc;
2236 else if (sal.end < prologue_end)
2237 prologue_end = sal.end;
2238
2239 This mechanism is very accurate so long as the optimizer
2240 doesn't move any instructions from the function body into the
2241 prologue. If this happens, sal.end will be the last
2242 instruction in the first hunk of prologue code just before
2243 the first instruction that the scheduler has moved from
2244 the body to the prologue.
2245
2246 In order to make sure that we scan all of the prologue
2247 instructions, we use a slightly less accurate mechanism which
2248 may scan more than necessary. To help compensate for this
2249 lack of accuracy, the prologue scanning loop below contains
2250 several clauses which will cause the loop to terminate early if
2251 an implausible prologue instruction is encountered.
2252
2253 The expression
2254
2255 prologue_start + 64
2256
2257 is a suitable endpoint since it accounts for the largest
2258 possible prologue plus up to five instructions inserted by
2259 the scheduler. */
2260
2261 if (prologue_end > prologue_start + 64)
2262 {
2263 prologue_end = prologue_start + 64; /* See above. */
2264 }
2265 }
2266 else
2267 {
2268 /* We have no symbol information. Our only option is to assume this
2269 function has a standard stack frame and the normal frame register.
2270 Then, we can find the value of our frame pointer on entry to
2271 the callee (or at the present moment if this is the innermost frame).
2272 The value stored there should be the address of the stmfd + 8. */
2273 CORE_ADDR frame_loc;
2274 ULONGEST return_value;
2275
2276 /* AAPCS does not use a frame register, so we can abort here. */
2277 if (tdep->arm_abi == ARM_ABI_AAPCS)
2278 return;
2279
2280 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
2281 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
2282 &return_value))
2283 return;
2284 else
2285 {
2286 prologue_start = gdbarch_addr_bits_remove
2287 (gdbarch, return_value) - 8;
2288 prologue_end = prologue_start + 64; /* See above. */
2289 }
2290 }
2291
2292 if (prev_pc < prologue_end)
2293 prologue_end = prev_pc;
2294
2295 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache,
2296 target_arm_instruction_reader ());
2297}
2298
2299static struct arm_prologue_cache *
2300arm_make_prologue_cache (frame_info_ptr this_frame)
2301{
2302 int reg;
2303 struct arm_prologue_cache *cache;
2304 CORE_ADDR unwound_fp, prev_sp;
2305
2306 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2307 arm_cache_init (cache, this_frame);
2308
2309 arm_scan_prologue (this_frame, cache);
2310
2311 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2312 if (unwound_fp == 0)
2313 return cache;
2314
2315 arm_gdbarch_tdep *tdep =
2316 gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
2317
2318 prev_sp = unwound_fp + cache->framesize;
2319 arm_cache_set_active_sp_value (cache, tdep, prev_sp);
2320
2321 /* Calculate actual addresses of saved registers using offsets
2322 determined by arm_scan_prologue. */
2323 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2324 if (cache->saved_regs[reg].is_addr ())
2325 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr () +
2326 prev_sp);
2327
2328 return cache;
2329}
2330
2331/* Implementation of the stop_reason hook for arm_prologue frames. */
2332
2333static enum unwind_stop_reason
2334arm_prologue_unwind_stop_reason (frame_info_ptr this_frame,
2335 void **this_cache)
2336{
2337 struct arm_prologue_cache *cache;
2338 CORE_ADDR pc;
2339
2340 if (*this_cache == NULL)
2341 *this_cache = arm_make_prologue_cache (this_frame);
2342 cache = (struct arm_prologue_cache *) *this_cache;
2343
2344 /* This is meant to halt the backtrace at "_start". */
2345 pc = get_frame_pc (this_frame);
2346 gdbarch *arch = get_frame_arch (this_frame);
2347 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (arch);
2348 if (pc <= tdep->lowest_pc)
2349 return UNWIND_OUTERMOST;
2350
2351 /* If we've hit a wall, stop. */
2352 if (arm_cache_get_prev_sp_value (cache, tdep) == 0)
2353 return UNWIND_OUTERMOST;
2354
2355 return UNWIND_NO_REASON;
2356}
2357
2358/* Our frame ID for a normal frame is the current function's starting PC
2359 and the caller's SP when we were called. */
2360
2361static void
2362arm_prologue_this_id (frame_info_ptr this_frame,
2363 void **this_cache,
2364 struct frame_id *this_id)
2365{
2366 struct arm_prologue_cache *cache;
2367 struct frame_id id;
2368 CORE_ADDR pc, func;
2369
2370 if (*this_cache == NULL)
2371 *this_cache = arm_make_prologue_cache (this_frame);
2372 cache = (struct arm_prologue_cache *) *this_cache;
2373
2374 arm_gdbarch_tdep *tdep
2375 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
2376
2377 /* Use function start address as part of the frame ID. If we cannot
2378 identify the start address (due to missing symbol information),
2379 fall back to just using the current PC. */
2380 pc = get_frame_pc (this_frame);
2381 func = get_frame_func (this_frame);
2382 if (!func)
2383 func = pc;
2384
2385 id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), func);
2386 *this_id = id;
2387}
2388
2389static struct value *
2390arm_prologue_prev_register (frame_info_ptr this_frame,
2391 void **this_cache,
2392 int prev_regnum)
2393{
2394 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2395 struct arm_prologue_cache *cache;
2396 CORE_ADDR sp_value;
2397
2398 if (*this_cache == NULL)
2399 *this_cache = arm_make_prologue_cache (this_frame);
2400 cache = (struct arm_prologue_cache *) *this_cache;
2401
2402 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
2403
2404 /* If this frame has signed the return address, mark it as such. */
2405 if (tdep->have_pacbti && cache->ra_signed_state.has_value ()
2406 && *cache->ra_signed_state)
2407 set_frame_previous_pc_masked (this_frame);
2408
2409 /* If we are asked to unwind the PC, then we need to return the LR
2410 instead. The prologue may save PC, but it will point into this
2411 frame's prologue, not the next frame's resume location. Also
2412 strip the saved T bit. A valid LR may have the low bit set, but
2413 a valid PC never does. */
2414 if (prev_regnum == ARM_PC_REGNUM)
2415 {
2416 CORE_ADDR lr;
2417
2418 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2419 return frame_unwind_got_constant (this_frame, prev_regnum,
2420 arm_addr_bits_remove (gdbarch, lr));
2421 }
2422
2423 /* SP is generally not saved to the stack, but this frame is
2424 identified by the next frame's stack pointer at the time of the call.
2425 The value was already reconstructed into PREV_SP. */
2426 if (prev_regnum == ARM_SP_REGNUM)
2427 return frame_unwind_got_constant (this_frame, prev_regnum,
2428 arm_cache_get_prev_sp_value (cache, tdep));
2429
2430 /* The value might be one of the alternative SPs; if so, use the
2431 value already constructed. */
2432 if (arm_is_alternative_sp_register (tdep, prev_regnum))
2433 {
2434 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
2435 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
2436 }
2437
2438 /* The CPSR may have been changed by the call instruction and by the
2439 called function. The only bit we can reconstruct is the T bit,
2440 by checking the low bit of LR as of the call. This is a reliable
2441 indicator of Thumb-ness except for some ARM v4T pre-interworking
2442 Thumb code, which could get away with a clear low bit as long as
2443 the called function did not use bx. Guess that all other
2444 bits are unchanged; the condition flags are presumably lost,
2445 but the processor status is likely valid. */
2446 if (prev_regnum == ARM_PS_REGNUM)
2447 {
2448 ULONGEST cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2449 CORE_ADDR lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2450
2451 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr);
2452 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2453 }
2454
2455 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2456 prev_regnum);
2457}
2458
2459static frame_unwind arm_prologue_unwind = {
2460 "arm prologue",
2461 NORMAL_FRAME,
2462 arm_prologue_unwind_stop_reason,
2463 arm_prologue_this_id,
2464 arm_prologue_prev_register,
2465 NULL,
2466 default_frame_sniffer
2467};
2468
2469/* Maintain a list of ARM exception table entries per objfile, similar to the
2470 list of mapping symbols. We only cache entries for standard ARM-defined
2471 personality routines; the cache will contain only the frame unwinding
2472 instructions associated with the entry (not the descriptors). */
2473
2474struct arm_exidx_entry
2475{
2476 CORE_ADDR addr;
2477 gdb_byte *entry;
2478
2479 bool operator< (const arm_exidx_entry &other) const
2480 {
2481 return addr < other.addr;
2482 }
2483};
2484
2485struct arm_exidx_data
2486{
2487 std::vector<std::vector<arm_exidx_entry>> section_maps;
2488};
2489
2490/* Per-BFD key to store exception handling information. */
2491static const registry<bfd>::key<arm_exidx_data> arm_exidx_data_key;
2492
2493static struct obj_section *
2494arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2495{
2496 struct obj_section *osect;
2497
2498 ALL_OBJFILE_OSECTIONS (objfile, osect)
2499 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2500 {
2501 bfd_vma start, size;
2502 start = bfd_section_vma (osect->the_bfd_section);
2503 size = bfd_section_size (osect->the_bfd_section);
2504
2505 if (start <= vma && vma < start + size)
2506 return osect;
2507 }
2508
2509 return NULL;
2510}
2511
2512/* Parse contents of exception table and exception index sections
2513 of OBJFILE, and fill in the exception table entry cache.
2514
2515 For each entry that refers to a standard ARM-defined personality
2516 routine, extract the frame unwinding instructions (from either
2517 the index or the table section). The unwinding instructions
2518 are normalized by:
2519 - extracting them from the rest of the table data
2520 - converting to host endianness
2521 - appending the implicit 0xb0 ("Finish") code
2522
2523 The extracted and normalized instructions are stored for later
2524 retrieval by the arm_find_exidx_entry routine. */
2525
2526static void
2527arm_exidx_new_objfile (struct objfile *objfile)
2528{
2529 struct arm_exidx_data *data;
2530 asection *exidx, *extab;
2531 bfd_vma exidx_vma = 0, extab_vma = 0;
2532 LONGEST i;
2533
2534 /* If we've already touched this file, do nothing. */
2535 if (!objfile || arm_exidx_data_key.get (objfile->obfd.get ()) != NULL)
2536 return;
2537
2538 /* Read contents of exception table and index. */
2539 exidx = bfd_get_section_by_name (objfile->obfd.get (),
2540 ELF_STRING_ARM_unwind);
2541 gdb::byte_vector exidx_data;
2542 if (exidx)
2543 {
2544 exidx_vma = bfd_section_vma (exidx);
2545 exidx_data.resize (bfd_section_size (exidx));
2546
2547 if (!bfd_get_section_contents (objfile->obfd.get (), exidx,
2548 exidx_data.data (), 0,
2549 exidx_data.size ()))
2550 return;
2551 }
2552
2553 extab = bfd_get_section_by_name (objfile->obfd.get (), ".ARM.extab");
2554 gdb::byte_vector extab_data;
2555 if (extab)
2556 {
2557 extab_vma = bfd_section_vma (extab);
2558 extab_data.resize (bfd_section_size (extab));
2559
2560 if (!bfd_get_section_contents (objfile->obfd.get (), extab,
2561 extab_data.data (), 0,
2562 extab_data.size ()))
2563 return;
2564 }
2565
2566 /* Allocate exception table data structure. */
2567 data = arm_exidx_data_key.emplace (objfile->obfd.get ());
2568 data->section_maps.resize (objfile->obfd->section_count);
2569
2570 /* Fill in exception table. */
2571 for (i = 0; i < exidx_data.size () / 8; i++)
2572 {
2573 struct arm_exidx_entry new_exidx_entry;
2574 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2575 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2576 exidx_data.data () + i * 8 + 4);
2577 bfd_vma addr = 0, word = 0;
2578 int n_bytes = 0, n_words = 0;
2579 struct obj_section *sec;
2580 gdb_byte *entry = NULL;
2581
2582 /* Extract address of start of function. */
2583 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2584 idx += exidx_vma + i * 8;
2585
2586 /* Find section containing function and compute section offset. */
2587 sec = arm_obj_section_from_vma (objfile, idx);
2588 if (sec == NULL)
2589 continue;
2590 idx -= bfd_section_vma (sec->the_bfd_section);
2591
2592 /* Determine address of exception table entry. */
2593 if (val == 1)
2594 {
2595 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2596 }
2597 else if ((val & 0xff000000) == 0x80000000)
2598 {
2599 /* Exception table entry embedded in .ARM.exidx
2600 -- must be short form. */
2601 word = val;
2602 n_bytes = 3;
2603 }
2604 else if (!(val & 0x80000000))
2605 {
2606 /* Exception table entry in .ARM.extab. */
2607 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2608 addr += exidx_vma + i * 8 + 4;
2609
2610 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2611 {
2612 word = bfd_h_get_32 (objfile->obfd,
2613 extab_data.data () + addr - extab_vma);
2614 addr += 4;
2615
2616 if ((word & 0xff000000) == 0x80000000)
2617 {
2618 /* Short form. */
2619 n_bytes = 3;
2620 }
2621 else if ((word & 0xff000000) == 0x81000000
2622 || (word & 0xff000000) == 0x82000000)
2623 {
2624 /* Long form. */
2625 n_bytes = 2;
2626 n_words = ((word >> 16) & 0xff);
2627 }
2628 else if (!(word & 0x80000000))
2629 {
2630 bfd_vma pers;
2631 struct obj_section *pers_sec;
2632 int gnu_personality = 0;
2633
2634 /* Custom personality routine. */
2635 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2636 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2637
2638 /* Check whether we've got one of the variants of the
2639 GNU personality routines. */
2640 pers_sec = arm_obj_section_from_vma (objfile, pers);
2641 if (pers_sec)
2642 {
2643 static const char *personality[] =
2644 {
2645 "__gcc_personality_v0",
2646 "__gxx_personality_v0",
2647 "__gcj_personality_v0",
2648 "__gnu_objc_personality_v0",
2649 NULL
2650 };
2651
2652 CORE_ADDR pc = pers + pers_sec->offset ();
2653 int k;
2654
2655 for (k = 0; personality[k]; k++)
2656 if (lookup_minimal_symbol_by_pc_name
2657 (pc, personality[k], objfile))
2658 {
2659 gnu_personality = 1;
2660 break;
2661 }
2662 }
2663
2664 /* If so, the next word contains a word count in the high
2665 byte, followed by the same unwind instructions as the
2666 pre-defined forms. */
2667 if (gnu_personality
2668 && addr + 4 <= extab_vma + extab_data.size ())
2669 {
2670 word = bfd_h_get_32 (objfile->obfd,
2671 (extab_data.data ()
2672 + addr - extab_vma));
2673 addr += 4;
2674 n_bytes = 3;
2675 n_words = ((word >> 24) & 0xff);
2676 }
2677 }
2678 }
2679 }
2680
2681 /* Sanity check address. */
2682 if (n_words)
2683 if (addr < extab_vma
2684 || addr + 4 * n_words > extab_vma + extab_data.size ())
2685 n_words = n_bytes = 0;
2686
2687 /* The unwind instructions reside in WORD (only the N_BYTES least
2688 significant bytes are valid), followed by N_WORDS words in the
2689 extab section starting at ADDR. */
2690 if (n_bytes || n_words)
2691 {
2692 gdb_byte *p = entry
2693 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2694 n_bytes + n_words * 4 + 1);
2695
2696 while (n_bytes--)
2697 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2698
2699 while (n_words--)
2700 {
2701 word = bfd_h_get_32 (objfile->obfd,
2702 extab_data.data () + addr - extab_vma);
2703 addr += 4;
2704
2705 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2706 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2707 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2708 *p++ = (gdb_byte) (word & 0xff);
2709 }
2710
2711 /* Implied "Finish" to terminate the list. */
2712 *p++ = 0xb0;
2713 }
2714
2715 /* Push the entry onto the vector. Entries are guaranteed to
2716 appear in order of increasing address. */
2717 new_exidx_entry.addr = idx;
2718 new_exidx_entry.entry = entry;
2719 data->section_maps[sec->the_bfd_section->index].push_back
2720 (new_exidx_entry);
2721 }
2722}
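/* A small standalone sketch (illustrative only, kept under "#if 0"; the
   function name and entry words are made-up assumptions) of the prel31
   sign-extension and the short-form test used when filling the table
   above.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
exidx_entry_example (void)
{
  /* First word: prel31 offset to the function start.  Sign-extend bit 30
     into bit 31, as done for "idx" above; 0x7ffffff8 decodes to -8.  */
  uint32_t idx = 0x7ffffff8;
  int32_t offset = (int32_t) (((idx & 0x7fffffff) ^ 0x40000000)
                              - 0x40000000);
  assert (offset == -8);

  /* Second word: not EXIDX_CANTUNWIND (1), and a top byte of 0x80 means
     the unwind instructions are inlined in the index (short form).  */
  uint32_t val = 0x80b0b0b0;    /* Three "Finish" opcodes.  */
  assert (val != 1);
  assert ((val & 0xff000000) == 0x80000000);
}
#endif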
2723
2724/* Search for the exception table entry covering MEMADDR. If one is found,
2725 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2726 set *START to the start of the region covered by this entry. */
2727
2728static gdb_byte *
2729arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2730{
2731 struct obj_section *sec;
2732
2733 sec = find_pc_section (memaddr);
2734 if (sec != NULL)
2735 {
2736 struct arm_exidx_data *data;
2737 struct arm_exidx_entry map_key = { memaddr - sec->addr (), 0 };
2738
2739 data = arm_exidx_data_key.get (sec->objfile->obfd.get ());
2740 if (data != NULL)
2741 {
2742 std::vector<arm_exidx_entry> &map
2743 = data->section_maps[sec->the_bfd_section->index];
2744 if (!map.empty ())
2745 {
2746 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2747
2748 /* std::lower_bound finds the earliest ordered insertion
2749 point. If the following symbol starts at this exact
2750 address, we use that; otherwise, the preceding
2751 exception table entry covers this address. */
2752 if (idx < map.end ())
2753 {
2754 if (idx->addr == map_key.addr)
2755 {
2756 if (start)
2757 *start = idx->addr + sec->addr ();
2758 return idx->entry;
2759 }
2760 }
2761
2762 if (idx > map.begin ())
2763 {
2764 idx = idx - 1;
2765 if (start)
2766 *start = idx->addr + sec->addr ();
2767 return idx->entry;
2768 }
2769 }
2770 }
2771 }
2772
2773 return NULL;
2774}
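/* A minimal sketch (illustrative only, kept under "#if 0"; the function name
   and addresses are assumptions) of the std::lower_bound lookup used above,
   with made-up section-relative entry start addresses.  */
#if 0
#include <algorithm>
#include <cassert>
#include <vector>

static void
exidx_lookup_example ()
{
  /* Entry start addresses, sorted as the exidx entries are.  */
  std::vector<unsigned> starts = { 0x0, 0x40, 0x100 };

  /* Searching for 0x44: lower_bound points at 0x100, so the preceding
     entry (starting at 0x40) covers the address, as in the code above.  */
  auto it = std::lower_bound (starts.begin (), starts.end (), 0x44u);
  assert (it != starts.begin () && *(it - 1) == 0x40);

  /* Searching for exactly 0x40: lower_bound lands on that entry.  */
  it = std::lower_bound (starts.begin (), starts.end (), 0x40u);
  assert (*it == 0x40);
}
#endif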
2775
2776/* Given the current frame THIS_FRAME, and its associated frame unwinding
2777 instruction list from the ARM exception table entry ENTRY, allocate and
2778 return a prologue cache structure describing how to unwind this frame.
2779
2780 Return NULL if the unwinding instruction list contains a "spare",
2781 "reserved" or "refuse to unwind" instruction as defined in section
2782 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2783 for the ARM Architecture" document. */
2784
2785static struct arm_prologue_cache *
2786arm_exidx_fill_cache (frame_info_ptr this_frame, gdb_byte *entry)
2787{
2788 CORE_ADDR vsp = 0;
2789 int vsp_valid = 0;
2790
2791 struct arm_prologue_cache *cache;
2792 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2793 arm_cache_init (cache, this_frame);
2794
2795 for (;;)
2796 {
2797 gdb_byte insn;
2798
2799 /* Whenever we reload SP, we have to retrieve its actual
2800 value in the current frame. */
2801 if (!vsp_valid)
2802 {
2803 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2804 {
2805 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2806 vsp = get_frame_register_unsigned (this_frame, reg);
2807 }
2808 else
2809 {
2810 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr ();
2811 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2812 }
2813
2814 vsp_valid = 1;
2815 }
2816
2817 /* Decode next unwind instruction. */
2818 insn = *entry++;
2819
2820 if ((insn & 0xc0) == 0)
2821 {
2822 int offset = insn & 0x3f;
2823 vsp += (offset << 2) + 4;
2824 }
2825 else if ((insn & 0xc0) == 0x40)
2826 {
2827 int offset = insn & 0x3f;
2828 vsp -= (offset << 2) + 4;
2829 }
2830 else if ((insn & 0xf0) == 0x80)
2831 {
2832 int mask = ((insn & 0xf) << 8) | *entry++;
2833 int i;
2834
2835 /* The special case of an all-zero mask identifies
2836 "Refuse to unwind". We return NULL to fall back
2837 to the prologue analyzer. */
2838 if (mask == 0)
2839 return NULL;
2840
2841 /* Pop registers r4..r15 under mask. */
2842 for (i = 0; i < 12; i++)
2843 if (mask & (1 << i))
2844 {
2845 cache->saved_regs[4 + i].set_addr (vsp);
2846 vsp += 4;
2847 }
2848
2849 /* Special-case popping SP -- we need to reload vsp. */
2850 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2851 vsp_valid = 0;
2852 }
2853 else if ((insn & 0xf0) == 0x90)
2854 {
2855 int reg = insn & 0xf;
2856
2857 /* Reserved cases. */
2858 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2859 return NULL;
2860
2861 /* Set SP from another register and mark VSP for reload. */
2862 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2863 vsp_valid = 0;
2864 }
2865 else if ((insn & 0xf0) == 0xa0)
2866 {
2867 int count = insn & 0x7;
2868 int pop_lr = (insn & 0x8) != 0;
2869 int i;
2870
2871 /* Pop r4..r[4+count]. */
2872 for (i = 0; i <= count; i++)
2873 {
2874 cache->saved_regs[4 + i].set_addr (vsp);
2875 vsp += 4;
2876 }
2877
2878 /* If indicated by flag, pop LR as well. */
2879 if (pop_lr)
2880 {
2881 cache->saved_regs[ARM_LR_REGNUM].set_addr (vsp);
2882 vsp += 4;
2883 }
2884 }
2885 else if (insn == 0xb0)
2886 {
2887 /* We could only have updated PC by popping into it; if so, it
2888 will show up as an address. Otherwise, copy LR into PC. */
2889 if (!cache->saved_regs[ARM_PC_REGNUM].is_addr ())
2890 cache->saved_regs[ARM_PC_REGNUM]
2891 = cache->saved_regs[ARM_LR_REGNUM];
2892
2893 /* We're done. */
2894 break;
2895 }
2896 else if (insn == 0xb1)
2897 {
2898 int mask = *entry++;
2899 int i;
2900
2901 /* All-zero mask and mask >= 16 is "spare". */
2902 if (mask == 0 || mask >= 16)
2903 return NULL;
2904
2905 /* Pop r0..r3 under mask. */
2906 for (i = 0; i < 4; i++)
2907 if (mask & (1 << i))
2908 {
2909 cache->saved_regs[i].set_addr (vsp);
2910 vsp += 4;
2911 }
2912 }
2913 else if (insn == 0xb2)
2914 {
2915 ULONGEST offset = 0;
2916 unsigned shift = 0;
2917
2918 do
2919 {
2920 offset |= (*entry & 0x7f) << shift;
2921 shift += 7;
2922 }
2923 while (*entry++ & 0x80);
2924
2925 vsp += 0x204 + (offset << 2);
2926 }
2927 else if (insn == 0xb3)
2928 {
2929 int start = *entry >> 4;
2930 int count = (*entry++) & 0xf;
2931 int i;
2932
2933 /* Only registers D0..D15 are valid here. */
2934 if (start + count >= 16)
2935 return NULL;
2936
2937 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2938 for (i = 0; i <= count; i++)
2939 {
2940 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2941 vsp += 8;
2942 }
2943
2944 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2945 vsp += 4;
2946 }
2947 else if ((insn & 0xf8) == 0xb8)
2948 {
2949 int count = insn & 0x7;
2950 int i;
2951
2952 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2953 for (i = 0; i <= count; i++)
2954 {
2955 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2956 vsp += 8;
2957 }
2958
2959 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2960 vsp += 4;
2961 }
2962 else if (insn == 0xc6)
2963 {
2964 int start = *entry >> 4;
2965 int count = (*entry++) & 0xf;
2966 int i;
2967
2968 /* Only registers WR0..WR15 are valid. */
2969 if (start + count >= 16)
2970 return NULL;
2971
2972 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2973 for (i = 0; i <= count; i++)
2974 {
2975 cache->saved_regs[ARM_WR0_REGNUM + start + i].set_addr (vsp);
2976 vsp += 8;
2977 }
2978 }
2979 else if (insn == 0xc7)
2980 {
2981 int mask = *entry++;
2982 int i;
2983
2984 /* All-zero mask and mask >= 16 is "spare". */
2985 if (mask == 0 || mask >= 16)
2986 return NULL;
2987
2988 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2989 for (i = 0; i < 4; i++)
2990 if (mask & (1 << i))
2991 {
2992 cache->saved_regs[ARM_WCGR0_REGNUM + i].set_addr (vsp);
2993 vsp += 4;
2994 }
2995 }
2996 else if ((insn & 0xf8) == 0xc0)
2997 {
2998 int count = insn & 0x7;
2999 int i;
3000
3001 /* Pop iwmmx registers WR[10]..WR[10+count]. */
3002 for (i = 0; i <= count; i++)
3003 {
3004 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].set_addr (vsp);
3005 vsp += 8;
3006 }
3007 }
3008 else if (insn == 0xc8)
3009 {
3010 int start = *entry >> 4;
3011 int count = (*entry++) & 0xf;
3012 int i;
3013
3014 /* Only registers D0..D31 are valid. */
3015 if (start + count >= 16)
3016 return NULL;
3017
3018 /* Pop VFP double-precision registers
3019 D[16+start]..D[16+start+count]. */
3020 for (i = 0; i <= count; i++)
3021 {
3022 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].set_addr (vsp);
3023 vsp += 8;
3024 }
3025 }
3026 else if (insn == 0xc9)
3027 {
3028 int start = *entry >> 4;
3029 int count = (*entry++) & 0xf;
3030 int i;
3031
3032 /* Pop VFP double-precision registers D[start]..D[start+count]. */
3033 for (i = 0; i <= count; i++)
3034 {
3035 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
3036 vsp += 8;
3037 }
3038 }
3039 else if ((insn & 0xf8) == 0xd0)
3040 {
3041 int count = insn & 0x7;
3042 int i;
3043
3044 /* Pop VFP double-precision registers D[8]..D[8+count]. */
3045 for (i = 0; i <= count; i++)
3046 {
3047 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
3048 vsp += 8;
3049 }
3050 }
3051 else
3052 {
3053 /* Everything else is "spare". */
3054 return NULL;
3055 }
3056 }
3057
3058 /* If we restore SP from a register, assume this was the frame register.
3059 Otherwise just fall back to SP as frame register. */
3060 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
3061 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
3062 else
3063 cache->framereg = ARM_SP_REGNUM;
3064
3065 /* Determine offset to previous frame. */
3066 cache->framesize
3067 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
3068
3069 /* We already got the previous SP. */
3070 arm_gdbarch_tdep *tdep
3071 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3072 arm_cache_set_active_sp_value (cache, tdep, vsp);
3073
3074 return cache;
3075}
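/* A worked example (illustrative only, kept under "#if 0"; the function name
   and byte sequence are assumptions) of the personality-routine unwind
   opcodes decoded above.  The normalized sequence 0x97 0x84 0x08 0xb0 means
   "vsp = r7", "pop {r7, lr}", "finish".  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
exidx_unwind_opcode_example (void)
{
  const uint8_t entry[] = { 0x97, 0x84, 0x08, 0xb0 };

  /* 0x9x: set vsp from register x (here r7).  */
  assert ((entry[0] & 0xf0) == 0x90 && (entry[0] & 0xf) == 7);

  /* 0x8x yy: pop r4..r15 under a 12-bit mask; bit 3 is r7, bit 10 is LR.  */
  assert ((entry[1] & 0xf0) == 0x80);
  int mask = ((entry[1] & 0xf) << 8) | entry[2];
  assert (mask == ((1 << 3) | (1 << 10)));

  /* 0xb0: finish.  */
  assert (entry[3] == 0xb0);
}
#endif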
3076
3077/* Unwinding via ARM exception table entries. Note that the sniffer
3078 already computes a filled-in prologue cache, which is then used
3079 with the same arm_prologue_this_id and arm_prologue_prev_register
3080 routines also used for prologue-parsing based unwinding. */
3081
3082static int
3083arm_exidx_unwind_sniffer (const struct frame_unwind *self,
3084 frame_info_ptr this_frame,
3085 void **this_prologue_cache)
3086{
3087 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3088 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3089 CORE_ADDR addr_in_block, exidx_region, func_start;
3090 struct arm_prologue_cache *cache;
3091 gdb_byte *entry;
3092
3093 /* See if we have an ARM exception table entry covering this address. */
3094 addr_in_block = get_frame_address_in_block (this_frame);
3095 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
3096 if (!entry)
3097 return 0;
3098
3099 /* The ARM exception table does not describe unwind information
3100 for arbitrary PC values, but is guaranteed to be correct only
3101 at call sites. We have to decide here whether we want to use
3102 ARM exception table information for this frame, or fall back
3103 to using prologue parsing. (Note that if we have DWARF CFI,
3104 this sniffer isn't even called -- CFI is always preferred.)
3105
3106 Before we make this decision, however, we check whether we
3107 actually have *symbol* information for the current frame.
3108 If not, prologue parsing would not work anyway, so we might
3109 as well use the exception table and hope for the best. */
3110 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
3111 {
3112 int exc_valid = 0;
3113
3114 /* If the next frame is "normal", we are at a call site in this
3115 frame, so exception information is guaranteed to be valid. */
3116 if (get_next_frame (this_frame)
3117 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
3118 exc_valid = 1;
3119
3120 /* We also assume exception information is valid if we're currently
3121 blocked in a system call. The system library is supposed to
3122 ensure this, so that e.g. pthread cancellation works. */
3123 if (arm_frame_is_thumb (this_frame))
3124 {
3125 ULONGEST insn;
3126
3127 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
3128 2, byte_order_for_code, &insn)
3129 && (insn & 0xff00) == 0xdf00 /* svc */)
3130 exc_valid = 1;
3131 }
3132 else
3133 {
3134 ULONGEST insn;
3135
3136 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
3137 4, byte_order_for_code, &insn)
3138 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
3139 exc_valid = 1;
3140 }
3141
3142 /* Bail out if we don't know that exception information is valid. */
3143 if (!exc_valid)
3144 return 0;
3145
3146 /* The ARM exception index does not mark the *end* of the region
3147 covered by the entry, and some functions will not have any entry.
3148 To correctly recognize the end of the covered region, the linker
3149 should have inserted dummy records with a CANTUNWIND marker.
3150
3151 Unfortunately, current versions of GNU ld do not reliably do
3152 this, and thus we may have found an incorrect entry above.
3153 As a (temporary) sanity check, we only use the entry if it
3154 lies *within* the bounds of the function. Note that this check
3155 might reject perfectly valid entries that just happen to cover
3156 multiple functions; therefore this check ought to be removed
3157 once the linker is fixed. */
3158 if (func_start > exidx_region)
3159 return 0;
3160 }
3161
3162 /* Decode the list of unwinding instructions into a prologue cache.
3163 Note that this may fail due to e.g. a "refuse to unwind" code. */
3164 cache = arm_exidx_fill_cache (this_frame, entry);
3165 if (!cache)
3166 return 0;
3167
3168 *this_prologue_cache = cache;
3169 return 1;
3170}
3171
3172struct frame_unwind arm_exidx_unwind = {
3173 "arm exidx",
3174 NORMAL_FRAME,
3175 default_frame_unwind_stop_reason,
3176 arm_prologue_this_id,
3177 arm_prologue_prev_register,
3178 NULL,
3179 arm_exidx_unwind_sniffer
3180};
3181
3182static struct arm_prologue_cache *
3183arm_make_epilogue_frame_cache (frame_info_ptr this_frame)
3184{
3185 struct arm_prologue_cache *cache;
3186 int reg;
3187
3188 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3189 arm_cache_init (cache, this_frame);
3190
3191 /* Still rely on the offset calculated from prologue. */
3192 arm_scan_prologue (this_frame, cache);
3193
3194 /* Since we are in epilogue, the SP has been restored. */
3195 arm_gdbarch_tdep *tdep
3196 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3197 arm_cache_set_active_sp_value (cache, tdep,
3198 get_frame_register_unsigned (this_frame,
3199 ARM_SP_REGNUM));
3200
3201 /* Calculate actual addresses of saved registers using offsets
3202 determined by arm_scan_prologue. */
3203 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
3204 if (cache->saved_regs[reg].is_addr ())
3205 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
3206 + arm_cache_get_prev_sp_value (cache, tdep));
3207
3208 return cache;
3209}
3210
3211/* Implementation of function hook 'this_id' in
3212 'struct frame_unwind' for epilogue unwinder. */
3213
3214static void
3215arm_epilogue_frame_this_id (frame_info_ptr this_frame,
3216 void **this_cache,
3217 struct frame_id *this_id)
3218{
3219 struct arm_prologue_cache *cache;
3220 CORE_ADDR pc, func;
3221
3222 if (*this_cache == NULL)
3223 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3224 cache = (struct arm_prologue_cache *) *this_cache;
3225
3226 /* Use function start address as part of the frame ID. If we cannot
3227 identify the start address (due to missing symbol information),
3228 fall back to just using the current PC. */
3229 pc = get_frame_pc (this_frame);
3230 func = get_frame_func (this_frame);
3231 if (func == 0)
3232 func = pc;
3233
3234 arm_gdbarch_tdep *tdep
3235 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3236 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), pc);
3237}
3238
3239/* Implementation of function hook 'prev_register' in
3240 'struct frame_unwind' for epilogue unwinder. */
3241
3242static struct value *
3243arm_epilogue_frame_prev_register (frame_info_ptr this_frame,
3244 void **this_cache, int regnum)
3245{
3246 if (*this_cache == NULL)
3247 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3248
3249 return arm_prologue_prev_register (this_frame, this_cache, regnum);
3250}
3251
3252static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
3253 CORE_ADDR pc);
3254static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
3255 CORE_ADDR pc);
3256
3257/* Implementation of function hook 'sniffer' in
3258 'struct frame_unwind' for epilogue unwinder. */
3259
3260static int
3261arm_epilogue_frame_sniffer (const struct frame_unwind *self,
3262 frame_info_ptr this_frame,
3263 void **this_prologue_cache)
3264{
3265 if (frame_relative_level (this_frame) == 0)
3266 {
3267 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3268 CORE_ADDR pc = get_frame_pc (this_frame);
3269
3270 if (arm_frame_is_thumb (this_frame))
3271 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3272 else
3273 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3274 }
3275 else
3276 return 0;
3277}
3278
3279/* Frame unwinder from epilogue. */
3280
3281static const struct frame_unwind arm_epilogue_frame_unwind =
3282{
3283 "arm epilogue",
3284 NORMAL_FRAME,
3285 default_frame_unwind_stop_reason,
3286 arm_epilogue_frame_this_id,
3287 arm_epilogue_frame_prev_register,
3288 NULL,
3289 arm_epilogue_frame_sniffer,
3290};
3291
3292/* Recognize GCC's trampoline for thumb call-indirect. If we are in a
3293 trampoline, return the target PC. Otherwise return 0.
3294
3295 void call0a (char c, short s, int i, long l) {}
3296
3297 int main (void)
3298 {
3299 (*pointer_to_call0a) (c, s, i, l);
3300 }
3301
3302 Instead of calling a stub library function _call_via_xx (xx is
3303 the register name), GCC may inline the trampoline in the object
3304 file as below (register r2 has the address of call0a).
3305
3306 .global main
3307 .type main, %function
3308 ...
3309 bl .L1
3310 ...
3311 .size main, .-main
3312
3313 .L1:
3314 bx r2
3315
3316 The trampoline 'bx r2' doesn't belong to main. */
3317
3318static CORE_ADDR
3319arm_skip_bx_reg (frame_info_ptr frame, CORE_ADDR pc)
3320{
3321 /* The heuristic for recognizing such a trampoline is that FRAME is
3322 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
3323 if (arm_frame_is_thumb (frame))
3324 {
3325 gdb_byte buf[2];
3326
3327 if (target_read_memory (pc, buf, 2) == 0)
3328 {
3329 struct gdbarch *gdbarch = get_frame_arch (frame);
3330 enum bfd_endian byte_order_for_code
3331 = gdbarch_byte_order_for_code (gdbarch);
3332 uint16_t insn
3333 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3334
3335 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3336 {
3337 CORE_ADDR dest
3338 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
3339
3340 /* Clear the LSB so that gdb core sets step-resume
3341 breakpoint at the right address. */
3342 return UNMAKE_THUMB_ADDR (dest);
3343 }
3344 }
3345 }
3346
3347 return 0;
3348}
3349
3350static struct arm_prologue_cache *
3351arm_make_stub_cache (frame_info_ptr this_frame)
3352{
3353 struct arm_prologue_cache *cache;
3354
3355 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3356 arm_cache_init (cache, this_frame);
3357
3358 arm_gdbarch_tdep *tdep
3359 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3360 arm_cache_set_active_sp_value (cache, tdep,
3361 get_frame_register_unsigned (this_frame,
3362 ARM_SP_REGNUM));
3363
3364 return cache;
3365}
3366
3367/* Our frame ID for a stub frame is the current SP and LR. */
3368
3369static void
3370arm_stub_this_id (frame_info_ptr this_frame,
3371 void **this_cache,
3372 struct frame_id *this_id)
3373{
3374 struct arm_prologue_cache *cache;
3375
3376 if (*this_cache == NULL)
3377 *this_cache = arm_make_stub_cache (this_frame);
3378 cache = (struct arm_prologue_cache *) *this_cache;
3379
3380 arm_gdbarch_tdep *tdep
3381 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3382 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3383 get_frame_pc (this_frame));
3384}
3385
3386static int
3387arm_stub_unwind_sniffer (const struct frame_unwind *self,
3388 frame_info_ptr this_frame,
3389 void **this_prologue_cache)
3390{
3391 CORE_ADDR addr_in_block;
3392 gdb_byte dummy[4];
3393 CORE_ADDR pc, start_addr;
3394 const char *name;
3395
3396 addr_in_block = get_frame_address_in_block (this_frame);
3397 pc = get_frame_pc (this_frame);
3398 if (in_plt_section (addr_in_block)
3399 /* We also use the stub unwinder if the target memory is unreadable,
3400 to avoid having the prologue unwinder try to read it. */
3401 || target_read_memory (pc, dummy, 4) != 0)
3402 return 1;
3403
3404 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
3405 && arm_skip_bx_reg (this_frame, pc) != 0)
3406 return 1;
3407
3408 return 0;
3409}
3410
3411struct frame_unwind arm_stub_unwind = {
3412 "arm stub",
3413 NORMAL_FRAME,
3414 default_frame_unwind_stop_reason,
3415 arm_stub_this_id,
3416 arm_prologue_prev_register,
3417 NULL,
3418 arm_stub_unwind_sniffer
3419};
3420
3421/* Put here the code to store, into CACHE->saved_regs, the addresses
3422 of the saved registers of frame described by THIS_FRAME. CACHE is
3423 returned. */
3424
3425static struct arm_prologue_cache *
3426arm_m_exception_cache (frame_info_ptr this_frame)
3427{
3428 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3429 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
3430 struct arm_prologue_cache *cache;
3431
3432 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3433 arm_cache_init (cache, this_frame);
3434
3435 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
3436 describes which bits in LR define which stack was used prior to the
3437 exception and whether the FPU was in use (causing an extended stack frame). */
3438
3439 /* In the lockup state PC contains a lockup magic value.
3440 The PC value of the next outer frame is irreversibly
3441 lost. The other registers are intact, so LR likely contains
3442 the PC of some frame next to the outer one, but we cannot analyze
3443 the next outer frame without knowing its PC;
3444 therefore we do not know the SP fixup for this frame.
3445 Some heuristics to resynchronize SP might be possible.
3446 For simplicity, just terminate the unwinding to prevent it going
3447 astray and attempting to read data/addresses it shouldn't,
3448 which may cause further issues due to side-effects. */
3449 CORE_ADDR pc = get_frame_pc (this_frame);
3450 if (arm_m_addr_is_lockup (pc))
3451 {
3452 /* The lockup can only be real in the innermost frame,
3453 as the CPU is stopped and cannot create more frames.
3454 If we hit the lockup magic PC in any other frame, it is
3455 just a sentinel at the top of the stack: do not warn then. */
3456 if (frame_relative_level (this_frame) == 0)
3457 warning (_("ARM M in lockup state, stack unwinding terminated."));
3458
3459 /* Terminate any further stack unwinding. */
3460 arm_cache_set_active_sp_value (cache, tdep, 0);
3461 return cache;
3462 }
3463
3464 CORE_ADDR lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
3465
3466 /* ARMv7-M Architecture Reference "A2.3.1 Arm core registers"
3467 states that LR is set to 0xffffffff on reset. ARMv8-M Architecture
3468 Reference "B3.3 Registers" states that LR is set to 0xffffffff on warm
3469 reset if Main Extension is implemented, otherwise the value is unknown. */
3470 if (lr == 0xffffffff)
3471 {
3472 /* Terminate any further stack unwinding. */
3473 arm_cache_set_active_sp_value (cache, tdep, 0);
3474 return cache;
3475 }
3476
3477 /* Check FNC_RETURN indicator bits (24-31). */
3478 bool fnc_return = (((lr >> 24) & 0xff) == 0xfe);
3479 if (fnc_return)
3480 {
3481 /* FNC_RETURN is only valid for targets with Security Extension. */
3482 if (!tdep->have_sec_ext)
3483 {
3484 error (_("While unwinding an exception frame, found unexpected Link "
3485 "Register value %s that requires the security extension, "
3486 "but the extension was not found or is disabled. This "
3487 "should not happen and may be caused by corrupt data or a "
3488 "bug in GDB."), phex (lr, ARM_INT_REGISTER_SIZE));
3489 }
3490
3491 if (!arm_unwind_secure_frames)
3492 {
3493 warning (_("Non-secure to secure stack unwinding disabled."));
3494
3495 /* Terminate any further stack unwinding. */
3496 arm_cache_set_active_sp_value (cache, tdep, 0);
3497 return cache;
3498 }
3499
3500 ULONGEST xpsr = get_frame_register_unsigned (this_frame, ARM_PS_REGNUM);
3501 if ((xpsr & 0x1ff) != 0)
3502 /* Handler mode: This is the mode that exceptions are handled in. */
3503 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_s_regnum);
3504 else
3505 /* Thread mode: This is the normal mode that programs run in. */
3506 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_s_regnum);
3507
3508 CORE_ADDR unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3509
3510 /* Stack layout for a function call from Secure to Non-Secure state
3511 (ARMv8-M section B3.16):
3512
3513 SP Offset
3514
3515 +-------------------+
3516 0x08 | |
3517 +-------------------+ <-- Original SP
3518 0x04 | Partial xPSR |
3519 +-------------------+
3520 0x00 | Return Address |
3521 +===================+ <-- New SP */
3522
3523 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + 0x00);
3524 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + 0x00);
3525 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + 0x04);
3526
3527 arm_cache_set_active_sp_value (cache, tdep, unwound_sp + 0x08);
3528
3529 return cache;
3530 }
3531
3532 /* Check EXC_RETURN indicator bits (24-31). */
3533 bool exc_return = (((lr >> 24) & 0xff) == 0xff);
3534 if (exc_return)
3535 {
3536 int sp_regnum;
3537 bool secure_stack_used = false;
3538 bool default_callee_register_stacking = false;
3539 bool exception_domain_is_secure = false;
3540 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3541
3542 /* Check EXC_RETURN bit SPSEL if Main or Thread (process) stack used. */
3543 bool process_stack_used = (bit (lr, 2) != 0);
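/* For example (illustrative values): LR == 0xfffffffd, a common EXC_RETURN
   on cores without the Security extension, has bit 2 set and so selects the
   thread (process) stack below, while LR == 0xfffffff9 clears bit 2 and
   selects the main stack.  */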
3544
3545 if (tdep->have_sec_ext)
3546 {
3547 secure_stack_used = (bit (lr, 6) != 0);
3548 default_callee_register_stacking = (bit (lr, 5) != 0);
3549 exception_domain_is_secure = (bit (lr, 0) != 0);
3550
3551 /* Unwinding from non-secure to secure can trip security
3552 measures. In order to avoid the debugger being
3553 intrusive, rely on the user to configure the requested
3554 mode. */
3555 if (secure_stack_used && !exception_domain_is_secure
3556 && !arm_unwind_secure_frames)
3557 {
3558 warning (_("Non-secure to secure stack unwinding disabled."));
3559
3560 /* Terminate any further stack unwinding. */
3561 arm_cache_set_active_sp_value (cache, tdep, 0);
3562 return cache;
3563 }
3564
3565 if (process_stack_used)
3566 {
3567 if (secure_stack_used)
3568 /* Secure thread (process) stack used, use PSP_S as SP. */
3569 sp_regnum = tdep->m_profile_psp_s_regnum;
3570 else
3571 /* Non-secure thread (process) stack used, use PSP_NS as SP. */
3572 sp_regnum = tdep->m_profile_psp_ns_regnum;
3573 }
3574 else
3575 {
3576 if (secure_stack_used)
3577 /* Secure main stack used, use MSP_S as SP. */
3578 sp_regnum = tdep->m_profile_msp_s_regnum;
3579 else
3580 /* Non-secure main stack used, use MSP_NS as SP. */
3581 sp_regnum = tdep->m_profile_msp_ns_regnum;
3582 }
3583 }
3584 else
3585 {
3586 if (process_stack_used)
3587 /* Thread (process) stack used, use PSP as SP. */
3588 sp_regnum = tdep->m_profile_psp_regnum;
3589 else
3590 /* Main stack used, use MSP as SP. */
3591 sp_regnum = tdep->m_profile_msp_regnum;
3592 }
3593
3594 /* Set the active SP regnum. */
3595 arm_cache_switch_prev_sp (cache, tdep, sp_regnum);
3596
3597 /* Fetch the SP to use for this frame. */
3598 CORE_ADDR unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3599
3600 /* Exception entry context stacking is described in ARMv8-M (section
3601 B3.19) and ARMv7-M (sections B1.5.6 and B1.5.7) Architecture Reference
3602 Manuals.
3603
3604 The following figure shows the structure of the stack frame when
3605 Security and Floating-point extensions are present.
3606
3607 SP Offsets
3608 Without With
3609 Callee Regs Callee Regs
3610 (Secure -> Non-Secure)
3611 +-------------------+
3612 0xA8 | | 0xD0
3613 +===================+ --+ <-- Original SP
3614 0xA4 | S31 | 0xCC |
3615 +-------------------+ |
3616 ... | Additional FP context
3617 +-------------------+ |
3618 0x68 | S16 | 0x90 |
3619 +===================+ --+
3620 0x64 | Reserved | 0x8C |
3621 +-------------------+ |
3622 0x60 | FPSCR | 0x88 |
3623 +-------------------+ |
3624 0x5C | S15 | 0x84 | FP context
3625 +-------------------+ |
3626 ... |
3627 +-------------------+ |
3628 0x20 | S0 | 0x48 |
3629 +===================+ --+
3630 0x1C | xPSR | 0x44 |
3631 +-------------------+ |
3632 0x18 | Return address | 0x40 |
3633 +-------------------+ |
3634 0x14 | LR(R14) | 0x3C |
3635 +-------------------+ |
3636 0x10 | R12 | 0x38 | State context
3637 +-------------------+ |
3638 0x0C | R3 | 0x34 |
3639 +-------------------+ |
3640 ... |
3641 +-------------------+ |
3642 0x00 | R0 | 0x28 |
3643 +===================+ --+
3644 | R11 | 0x24 |
3645 +-------------------+ |
3646 ... |
3647 +-------------------+ | Additional state
3648 | R4 | 0x08 | context when
3649 +-------------------+ | transitioning from
3650 | Reserved | 0x04 | Secure to Non-Secure
3651 +-------------------+ |
3652 | Magic signature | 0x00 |
3653 +===================+ --+ <-- New SP */
3654
3655 uint32_t sp_r0_offset = 0;
3656
3657 /* With the Security extension, the hardware saves R4..R11 too. */
3658 if (tdep->have_sec_ext && secure_stack_used
3659 && (!default_callee_register_stacking || !exception_domain_is_secure))
3660 {
3661 /* Record the stack addresses at which the integer callee registers R4..R11 were saved. */
3662 cache->saved_regs[4].set_addr (unwound_sp + 0x08);
3663 cache->saved_regs[5].set_addr (unwound_sp + 0x0C);
3664 cache->saved_regs[6].set_addr (unwound_sp + 0x10);
3665 cache->saved_regs[7].set_addr (unwound_sp + 0x14);
3666 cache->saved_regs[8].set_addr (unwound_sp + 0x18);
3667 cache->saved_regs[9].set_addr (unwound_sp + 0x1C);
3668 cache->saved_regs[10].set_addr (unwound_sp + 0x20);
3669 cache->saved_regs[11].set_addr (unwound_sp + 0x24);
3670 sp_r0_offset = 0x28;
3671 }
3672
3673 /* The hardware saves eight 32-bit words, comprising xPSR,
3674 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3675 "B1.5.6 Exception entry behavior" in
3676 "ARMv7-M Architecture Reference Manual". */
3677 cache->saved_regs[0].set_addr (unwound_sp + sp_r0_offset);
3678 cache->saved_regs[1].set_addr (unwound_sp + sp_r0_offset + 0x04);
3679 cache->saved_regs[2].set_addr (unwound_sp + sp_r0_offset + 0x08);
3680 cache->saved_regs[3].set_addr (unwound_sp + sp_r0_offset + 0x0C);
3681 cache->saved_regs[ARM_IP_REGNUM].set_addr (unwound_sp + sp_r0_offset
3682 + 0x10);
3683 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + sp_r0_offset
3684 + 0x14);
3685 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + sp_r0_offset
3686 + 0x18);
3687 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + sp_r0_offset
3688 + 0x1C);
3689
3690 /* Check EXC_RETURN bit FTYPE to see whether the extended stack frame
3691 (FPU regs stored) type was used. */
3692 bool extended_frame_used = (bit (lr, 4) == 0);
3693 if (extended_frame_used)
3694 {
3695 ULONGEST fpccr;
3696 ULONGEST fpcar;
3697
3698 /* Read FPCCR register. */
3699 if (!safe_read_memory_unsigned_integer (FPCCR, ARM_INT_REGISTER_SIZE,
3700 byte_order, &fpccr))
3701 {
3702 warning (_("Could not fetch required FPCCR content. Further "
3703 "unwinding is impossible."));
3704 arm_cache_set_active_sp_value (cache, tdep, 0);
3705 return cache;
3706 }
3707
3708 /* Read FPCAR register. */
3709 if (!safe_read_memory_unsigned_integer (FPCAR, ARM_INT_REGISTER_SIZE,
3710 byte_order, &fpcar))
3711 {
3712 warning (_("Could not fetch FPCAR content. Further unwinding of "
3713 "FP register values will be unreliable."));
3714 fpcar = 0;
3715 }
3716
3717 bool fpccr_aspen = bit (fpccr, 31);
3718 bool fpccr_lspen = bit (fpccr, 30);
3719 bool fpccr_ts = bit (fpccr, 26);
3720 bool fpccr_lspact = bit (fpccr, 0);
3721
3722 /* The LSPEN and ASPEN bits indicate whether lazy state preservation
3723 for FP registers is enabled or disabled. The LSPACT bit indicates,
3724 together with FPCAR, whether the lazy state preservation feature is
3725 active for the current frame or for another frame.
3726 See "Lazy context save of FP state" in B1.5.7, and ARM AN298
3727 (supported by the Cortex-M4F architecture) for details. */
3728 bool fpcar_points_to_this_frame = ((unwound_sp + sp_r0_offset + 0x20)
3729 == (fpcar & ~0x7));
3730 bool read_fp_regs_from_stack = (!(fpccr_aspen && fpccr_lspen
3731 && fpccr_lspact
3732 && fpcar_points_to_this_frame));
3733
3734 /* Extended stack frame type used. */
3735 if (read_fp_regs_from_stack)
3736 {
3737 CORE_ADDR addr = unwound_sp + sp_r0_offset + 0x20;
3738 for (int i = 0; i < 8; i++)
3739 {
3740 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (addr);
3741 addr += 8;
3742 }
3743 }
3744 cache->saved_regs[ARM_FPSCR_REGNUM].set_addr (unwound_sp
3745 + sp_r0_offset + 0x60);
3746
3747 if (tdep->have_sec_ext && !default_callee_register_stacking
3748 && fpccr_ts)
3749 {
3750 /* Handle floating-point callee saved registers. */
3751 if (read_fp_regs_from_stack)
3752 {
3753 CORE_ADDR addr = unwound_sp + sp_r0_offset + 0x68;
3754 for (int i = 8; i < 16; i++)
3755 {
3756 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (addr);
3757 addr += 8;
3758 }
3759 }
3760
3761 arm_cache_set_active_sp_value (cache, tdep,
3762 unwound_sp + sp_r0_offset + 0xA8);
3763 }
3764 else
3765 {
3766 /* Offset 0x64 is reserved. */
3767 arm_cache_set_active_sp_value (cache, tdep,
3768 unwound_sp + sp_r0_offset + 0x68);
3769 }
3770 }
3771 else
3772 {
3773 /* Standard stack frame type used. */
3774 arm_cache_set_active_sp_value (cache, tdep,
3775 unwound_sp + sp_r0_offset + 0x20);
3776 }
3777
3778 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3779 aligner between the top of the 32-byte stack frame and the
3780 previous context's stack pointer. */
3781 ULONGEST xpsr;
3782 if (!safe_read_memory_unsigned_integer (cache->saved_regs[ARM_PS_REGNUM]
3783 .addr (), ARM_INT_REGISTER_SIZE,
3784 byte_order, &xpsr))
3785 {
3786 warning (_("Could not fetch required XPSR content. Further "
3787 "unwinding is impossible."));
3788 arm_cache_set_active_sp_value (cache, tdep, 0);
3789 return cache;
3790 }
3791
3792 if (bit (xpsr, 9) != 0)
3793 {
3794 CORE_ADDR new_sp = arm_cache_get_prev_sp_value (cache, tdep) + 4;
3795 arm_cache_set_active_sp_value (cache, tdep, new_sp);
3796 }
3797
3798 return cache;
3799 }
3800
3801 internal_error (_("While unwinding an exception frame, "
3802 "found unexpected Link Register value "
3803 "%s. This should not happen and may "
3804 "be caused by corrupt data or a bug in"
3805 " GDB."),
3806 phex (lr, ARM_INT_REGISTER_SIZE));
3807}
3808
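/* For illustration only: how an EXC_RETURN value decodes under the bit
   tests performed above (a sketch, assuming the common value
   0xFFFFFFFD):

     uint32_t lr = 0xFFFFFFFD;
     ((lr >> 24) & 0xff) == 0xff;   -> true: this is an EXC_RETURN
     bit (lr, 2) != 0;              -> true: Thread (process) stack, PSP
     bit (lr, 4) == 0;              -> false: standard (non-FP) frame

   i.e. a return to Thread mode using PSP with the basic eight-word state
   context frame.  */
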
3809/* Implementation of the stop_reason hook for arm_m_exception frames. */
3810
3811static enum unwind_stop_reason
3812arm_m_exception_frame_unwind_stop_reason (frame_info_ptr this_frame,
3813 void **this_cache)
3814{
3815 struct arm_prologue_cache *cache;
3816 arm_gdbarch_tdep *tdep
3817 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3818
3819 if (*this_cache == NULL)
3820 *this_cache = arm_m_exception_cache (this_frame);
3821 cache = (struct arm_prologue_cache *) *this_cache;
3822
3823 /* If we've hit a wall, stop. */
3824 if (arm_cache_get_prev_sp_value (cache, tdep) == 0)
3825 return UNWIND_OUTERMOST;
3826
3827 return UNWIND_NO_REASON;
3828}
3829
3830/* Implementation of function hook 'this_id' in
3831 'struct frame_unwind'. */
3832
3833static void
3834arm_m_exception_this_id (frame_info_ptr this_frame,
3835 void **this_cache,
3836 struct frame_id *this_id)
3837{
3838 struct arm_prologue_cache *cache;
3839
3840 if (*this_cache == NULL)
3841 *this_cache = arm_m_exception_cache (this_frame);
3842 cache = (struct arm_prologue_cache *) *this_cache;
3843
3844 /* Our frame ID for a stub frame is the current SP and LR. */
3845 arm_gdbarch_tdep *tdep
3846 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3847 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3848 get_frame_pc (this_frame));
3849}
3850
3851/* Implementation of function hook 'prev_register' in
3852 'struct frame_unwind'. */
3853
3854static struct value *
3855arm_m_exception_prev_register (frame_info_ptr this_frame,
3856 void **this_cache,
3857 int prev_regnum)
3858{
3859 struct arm_prologue_cache *cache;
3860 CORE_ADDR sp_value;
3861
3862 if (*this_cache == NULL)
3863 *this_cache = arm_m_exception_cache (this_frame);
3864 cache = (struct arm_prologue_cache *) *this_cache;
3865
3866 /* The value was already reconstructed into PREV_SP. */
3867 arm_gdbarch_tdep *tdep
3868 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3869 if (prev_regnum == ARM_SP_REGNUM)
3870 return frame_unwind_got_constant (this_frame, prev_regnum,
3871 arm_cache_get_prev_sp_value (cache, tdep));
3872
3873 /* If we are asked to unwind the PC, strip the saved T bit. */
3874 if (prev_regnum == ARM_PC_REGNUM)
3875 {
3876 struct value *value = trad_frame_get_prev_register (this_frame,
3877 cache->saved_regs,
3878 prev_regnum);
3879 CORE_ADDR pc = value_as_address (value);
3880 return frame_unwind_got_constant (this_frame, prev_regnum,
3881 UNMAKE_THUMB_ADDR (pc));
3882 }
3883
3884 /* The value might be one of the alternative SP registers; if so, use
3885 the value already constructed. */
3886 if (arm_is_alternative_sp_register (tdep, prev_regnum))
3887 {
3888 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
3889 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
3890 }
3891
3892 /* If we are asked to unwind the xPSR, set the T bit if the PC is in
3893 Thumb mode. The LR register is unreliable here as it contains the
3894 FNC_RETURN or EXC_RETURN pattern. */
3895 if (prev_regnum == ARM_PS_REGNUM)
3896 {
3897 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3898 struct value *value = trad_frame_get_prev_register (this_frame,
3899 cache->saved_regs,
3900 ARM_PC_REGNUM);
3901 CORE_ADDR pc = value_as_address (value);
3902 value = trad_frame_get_prev_register (this_frame, cache->saved_regs,
3903 ARM_PS_REGNUM);
3904 ULONGEST xpsr = value_as_long (value);
3905
3906 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3907 xpsr = reconstruct_t_bit (gdbarch, pc, xpsr);
3908 return frame_unwind_got_constant (this_frame, ARM_PS_REGNUM, xpsr);
3909 }
3910
3911 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3912 prev_regnum);
3913}
3914
3915/* Implementation of function hook 'sniffer' in
3916 'struct frame_unwind'. */
3917
3918static int
3919arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3920 frame_info_ptr this_frame,
3921 void **this_prologue_cache)
3922{
3923 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3924 CORE_ADDR this_pc = get_frame_pc (this_frame);
3925
3926 /* No need to check is_m; this sniffer is only registered for
3927 M-profile architectures. */
3928
3929 /* Check if exception frame returns to a magic PC value. */
3930 return arm_m_addr_is_magic (gdbarch, this_pc);
3931}
3932
3933/* Frame unwinder for M-profile exceptions (EXC_RETURN on stack),
3934 lockup and secure/nonsecure interstate function calls (FNC_RETURN). */
3935
3936struct frame_unwind arm_m_exception_unwind =
3937{
3938 "arm m exception lockup sec_fnc",
3939 SIGTRAMP_FRAME,
3940 arm_m_exception_frame_unwind_stop_reason,
3941 arm_m_exception_this_id,
3942 arm_m_exception_prev_register,
3943 NULL,
3944 arm_m_exception_unwind_sniffer
3945};
3946
3947static CORE_ADDR
3948arm_normal_frame_base (frame_info_ptr this_frame, void **this_cache)
3949{
3950 struct arm_prologue_cache *cache;
3951
3952 if (*this_cache == NULL)
3953 *this_cache = arm_make_prologue_cache (this_frame);
3954 cache = (struct arm_prologue_cache *) *this_cache;
3955
3956 arm_gdbarch_tdep *tdep
3957 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3958 return arm_cache_get_prev_sp_value (cache, tdep) - cache->framesize;
3959}
3960
3961struct frame_base arm_normal_base = {
3962 &arm_prologue_unwind,
3963 arm_normal_frame_base,
3964 arm_normal_frame_base,
3965 arm_normal_frame_base
3966};
3967
3968struct arm_dwarf2_prev_register_cache
3969{
3970 /* Cached value of the corresponding stack pointer for the inner frame. */
3971 CORE_ADDR sp;
3972 CORE_ADDR msp;
3973 CORE_ADDR msp_s;
3974 CORE_ADDR msp_ns;
3975 CORE_ADDR psp;
3976 CORE_ADDR psp_s;
3977 CORE_ADDR psp_ns;
3978};
3979
3980static struct value *
3981arm_dwarf2_prev_register (frame_info_ptr this_frame, void **this_cache,
3982 int regnum)
3983{
3984 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3985 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
3986 CORE_ADDR lr;
3987 ULONGEST cpsr;
3988 arm_dwarf2_prev_register_cache *cache
3989 = ((arm_dwarf2_prev_register_cache *)
3990 dwarf2_frame_get_fn_data (this_frame, this_cache,
3991 arm_dwarf2_prev_register));
3992
3993 if (!cache)
3994 {
3995 const unsigned int size = sizeof (struct arm_dwarf2_prev_register_cache);
3996 cache = ((arm_dwarf2_prev_register_cache *)
3997 dwarf2_frame_allocate_fn_data (this_frame, this_cache,
3998 arm_dwarf2_prev_register, size));
3999
4000 if (tdep->have_sec_ext)
4001 {
4002 cache->sp
4003 = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
4004
4005 cache->msp_s
4006 = get_frame_register_unsigned (this_frame,
4007 tdep->m_profile_msp_s_regnum);
4008 cache->msp_ns
4009 = get_frame_register_unsigned (this_frame,
4010 tdep->m_profile_msp_ns_regnum);
4011 cache->psp_s
4012 = get_frame_register_unsigned (this_frame,
4013 tdep->m_profile_psp_s_regnum);
4014 cache->psp_ns
4015 = get_frame_register_unsigned (this_frame,
4016 tdep->m_profile_psp_ns_regnum);
4017 }
4018 else if (tdep->is_m)
4019 {
4020 cache->sp
4021 = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
4022
4023 cache->msp
4024 = get_frame_register_unsigned (this_frame,
4025 tdep->m_profile_msp_regnum);
4026 cache->psp
4027 = get_frame_register_unsigned (this_frame,
4028 tdep->m_profile_psp_regnum);
4029 }
4030 }
4031
4032 if (regnum == ARM_PC_REGNUM)
4033 {
4034 /* The PC is normally copied from the return column, which
4035 describes saves of LR. However, that version may have an
4036 extra bit set to indicate Thumb state. The bit is not
4037 part of the PC. */
4038
4039 /* Record in the frame whether the return address was signed. */
4040 if (tdep->have_pacbti)
4041 {
4042 CORE_ADDR ra_auth_code
4043 = frame_unwind_register_unsigned (this_frame,
4044 tdep->pacbti_pseudo_base);
4045
4046 if (ra_auth_code != 0)
4047 set_frame_previous_pc_masked (this_frame);
4048 }
4049
4050 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
4051 return frame_unwind_got_constant (this_frame, regnum,
4052 arm_addr_bits_remove (gdbarch, lr));
4053 }
4054 else if (regnum == ARM_PS_REGNUM)
4055 {
4056 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
4057 cpsr = get_frame_register_unsigned (this_frame, regnum);
4058 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
4059 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr);
4060 return frame_unwind_got_constant (this_frame, regnum, cpsr);
4061 }
4062 else if (arm_is_alternative_sp_register (tdep, regnum))
4063 {
4064 /* Handle the alternative SP registers on Cortex-M. */
4065 bool override_with_sp_value = false;
4066 CORE_ADDR val;
4067
4068 if (tdep->have_sec_ext)
4069 {
4070 bool is_msp = (regnum == tdep->m_profile_msp_regnum)
4071 && (cache->msp_s == cache->sp || cache->msp_ns == cache->sp);
4072 bool is_msp_s = (regnum == tdep->m_profile_msp_s_regnum)
4073 && (cache->msp_s == cache->sp);
4074 bool is_msp_ns = (regnum == tdep->m_profile_msp_ns_regnum)
4075 && (cache->msp_ns == cache->sp);
4076 bool is_psp = (regnum == tdep->m_profile_psp_regnum)
4077 && (cache->psp_s == cache->sp || cache->psp_ns == cache->sp);
4078 bool is_psp_s = (regnum == tdep->m_profile_psp_s_regnum)
4079 && (cache->psp_s == cache->sp);
4080 bool is_psp_ns = (regnum == tdep->m_profile_psp_ns_regnum)
4081 && (cache->psp_ns == cache->sp);
4082
4083 override_with_sp_value = is_msp || is_msp_s || is_msp_ns
4084 || is_psp || is_psp_s || is_psp_ns;
4085
4086 }
4087 else if (tdep->is_m)
4088 {
4089 bool is_msp = (regnum == tdep->m_profile_msp_regnum)
4090 && (cache->sp == cache->msp);
4091 bool is_psp = (regnum == tdep->m_profile_psp_regnum)
4092 && (cache->sp == cache->psp);
4093
4094 override_with_sp_value = is_msp || is_psp;
4095 }
4096
4097 if (override_with_sp_value)
4098 {
4099 /* Use value of SP from previous frame. */
4100 frame_info_ptr prev_frame = get_prev_frame (this_frame);
4101 if (prev_frame)
4102 val = get_frame_register_unsigned (prev_frame, ARM_SP_REGNUM);
4103 else
4104 val = get_frame_base (this_frame);
4105 }
4106 else
4107 /* Use value for the register from previous frame. */
4108 val = get_frame_register_unsigned (this_frame, regnum);
4109
4110 return frame_unwind_got_constant (this_frame, regnum, val);
4111 }
4112
4113 internal_error (_("Unexpected register %d"), regnum);
4114}
4115
4116/* Implement the stack_frame_destroyed_p gdbarch method. */
4117
4118static int
4119thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4120{
4121 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4122 unsigned int insn, insn2;
4123 int found_return = 0, found_stack_adjust = 0;
4124 CORE_ADDR func_start, func_end;
4125 CORE_ADDR scan_pc;
4126 gdb_byte buf[4];
4127
4128 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
4129 return 0;
4130
4131 /* The epilogue is a sequence of instructions along the following lines:
4132
4133 - add stack frame size to SP or FP
4134 - [if frame pointer used] restore SP from FP
4135 - restore registers from SP [may include PC]
4136 - a return-type instruction [if PC wasn't already restored]
4137
4138 In a first pass, we scan forward from the current PC and verify the
4139 instructions we find as compatible with this sequence, ending in a
4140 return instruction.
4141
4142 However, this is not sufficient to distinguish indirect function calls
4143 within a function from indirect tail calls in the epilogue in some cases.
4144 Therefore, if we didn't already find any SP-changing instruction during
4145 forward scan, we add a backward scanning heuristic to ensure we actually
4146 are in the epilogue. */
4147
4148 scan_pc = pc;
4149 while (scan_pc < func_end && !found_return)
4150 {
4151 if (target_read_memory (scan_pc, buf, 2))
4152 break;
4153
4154 scan_pc += 2;
4155 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
4156
4157 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
4158 found_return = 1;
4159 else if (insn == 0x46f7) /* mov pc, lr */
4160 found_return = 1;
4161 else if (thumb_instruction_restores_sp (insn))
4162 {
4163 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4164 found_return = 1;
4165 }
4166 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4167 {
4168 if (target_read_memory (scan_pc, buf, 2))
4169 break;
4170
4171 scan_pc += 2;
4172 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
4173
4174 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
4175 {
4176 if (insn2 & 0x8000) /* <registers> include PC. */
4177 found_return = 1;
4178 }
4179 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
4180 && (insn2 & 0x0fff) == 0x0b04)
4181 {
4182 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
4183 found_return = 1;
4184 }
4185 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
4186 && (insn2 & 0x0e00) == 0x0a00)
4187 ;
4188 else
4189 break;
4190 }
4191 else
4192 break;
4193 }
4194
4195 if (!found_return)
4196 return 0;
4197
4198 /* Since any instruction in the epilogue sequence, with the possible
4199 exception of return itself, updates the stack pointer, we need to
4200 scan backwards for at most one instruction. Try either a 16-bit or
4201 a 32-bit instruction. This is just a heuristic, so we do not worry
4202 too much about false positives. */
4203
4204 if (pc - 4 < func_start)
4205 return 0;
4206 if (target_read_memory (pc - 4, buf, 4))
4207 return 0;
4208
4209 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
4210 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
4211
4212 if (thumb_instruction_restores_sp (insn2))
4213 found_stack_adjust = 1;
4214 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
4215 found_stack_adjust = 1;
4216 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
4217 && (insn2 & 0x0fff) == 0x0b04)
4218 found_stack_adjust = 1;
4219 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
4220 && (insn2 & 0x0e00) == 0x0a00)
4221 found_stack_adjust = 1;
4222
4223 return found_stack_adjust;
4224}
4225
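/* For illustration only: a typical Thumb epilogue and how the forward
   scan above classifies it (a sketch, not an exhaustive list):

     add sp, #8          ; matches thumb_instruction_restores_sp
     pop {r4, r7, pc}    ; 0xbdxx, pop including PC -> found_return

   A trailing "bx lr" (0x4770) would likewise satisfy the
   (insn & 0xff80) == 0x4700 test.  */
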
4226static int
4227arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4228{
4229 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4230 unsigned int insn;
4231 int found_return;
4232 CORE_ADDR func_start, func_end;
4233
4234 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
4235 return 0;
4236
4237 /* We are in the epilogue if the previous instruction was a stack
4238 adjustment and the next instruction is a possible return (bx, mov
4239 pc, or pop). We could have to scan backwards to find the stack
4240 adjustment, or forwards to find the return, but this is a decent
4241 approximation. First scan forwards. */
4242
4243 found_return = 0;
4244 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4245 if (bits (insn, 28, 31) != INST_NV)
4246 {
4247 if ((insn & 0x0ffffff0) == 0x012fff10)
4248 /* BX. */
4249 found_return = 1;
4250 else if ((insn & 0x0ffffff0) == 0x01a0f000)
4251 /* MOV PC. */
4252 found_return = 1;
4253 else if ((insn & 0x0fff0000) == 0x08bd0000
4254 && (insn & 0x0000c000) != 0)
4255 /* POP (LDMIA), including PC or LR. */
4256 found_return = 1;
4257 }
4258
4259 if (!found_return)
4260 return 0;
4261
4262 /* Scan backwards. This is just a heuristic, so do not worry about
4263 false positives from mode changes. */
4264
4265 if (pc < func_start + 4)
4266 return 0;
4267
4268 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
4269 if (arm_instruction_restores_sp (insn))
4270 return 1;
4271
4272 return 0;
4273}
4274
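/* For illustration only: ARM-mode epilogue instructions accepted by the
   checks above include, for example:

     ldmia sp!, {r4, r11, pc}   ; 0xe8bd8810, POP including PC
     bx lr                      ; 0xe12fff1e, matches the 0x012fff10 mask

   with the preceding instruction expected to be an SP-restoring one such
   as "add sp, sp, #N".  */
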
4275/* Implement the stack_frame_destroyed_p gdbarch method. */
4276
4277static int
4278arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4279{
4280 if (arm_pc_is_thumb (gdbarch, pc))
4281 return thumb_stack_frame_destroyed_p (gdbarch, pc);
4282 else
4283 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
4284}
4285
4286/* When arguments must be pushed onto the stack, they go on in reverse
4287 order. The code below implements a FILO (stack) to do this. */
4288
4289struct arm_stack_item
4290{
4291 int len;
4292 struct arm_stack_item *prev;
4293 gdb_byte *data;
4294};
4295
4296static struct arm_stack_item *
4297push_stack_item (struct arm_stack_item *prev, const gdb_byte *contents,
4298 int len)
4299{
4300 struct arm_stack_item *si;
4301 si = XNEW (struct arm_stack_item);
4302 si->data = (gdb_byte *) xmalloc (len);
4303 si->len = len;
4304 si->prev = prev;
4305 memcpy (si->data, contents, len);
4306 return si;
4307}
4308
4309static struct arm_stack_item *
4310pop_stack_item (struct arm_stack_item *si)
4311{
4312 struct arm_stack_item *dead = si;
4313 si = si->prev;
4314 xfree (dead->data);
4315 xfree (dead);
4316 return si;
4317}
4318
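/* For illustration only: a sketch of how the FILO above is used when
   laying out stack arguments.  Items are pushed in argument order and
   written back while popping, so the first argument ends up at the
   lowest address (the final SP):

     struct arm_stack_item *si = NULL;
     si = push_stack_item (si, arg0, 4);       <- pushed first
     si = push_stack_item (si, arg1, 4);       <- pushed second
     while (si)
       {
         sp -= si->len;
         write_memory (sp, si->data, si->len); <- arg1 written first (higher
         si = pop_stack_item (si);                address), arg0 last, at SP
       }  */
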
4319/* Implement the gdbarch type alignment method, overrides the generic
4320 alignment algorithm for anything that is arm specific. */
4321
4322static ULONGEST
4323arm_type_align (gdbarch *gdbarch, struct type *t)
4324{
4325 t = check_typedef (t);
4326 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
4327 {
4328 /* Use the natural alignment for vector types (the same as for the
4329 underlying scalar type), but the maximum alignment is 64 bits. */
4330 if (t->length () > 8)
4331 return 8;
4332 else
4333 return t->length ();
4334 }
4335
4336 /* Allow the common code to calculate the alignment. */
4337 return 0;
4338}
4339
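/* For illustration only: under the rule above, an 8-byte (2 x float)
   vector keeps its natural 8-byte alignment, while a 16-byte Neon quad
   vector is capped at 8-byte alignment; non-vector types return 0 and
   fall through to the generic alignment code.  */
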
4340/* Possible base types for a candidate for passing and returning in
4341 VFP registers. */
4342
4343enum arm_vfp_cprc_base_type
4344{
4345 VFP_CPRC_UNKNOWN,
4346 VFP_CPRC_SINGLE,
4347 VFP_CPRC_DOUBLE,
4348 VFP_CPRC_VEC64,
4349 VFP_CPRC_VEC128
4350};
4351
4352/* The length of one element of base type B. */
4353
4354static unsigned
4355arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
4356{
4357 switch (b)
4358 {
4359 case VFP_CPRC_SINGLE:
4360 return 4;
4361 case VFP_CPRC_DOUBLE:
4362 return 8;
4363 case VFP_CPRC_VEC64:
4364 return 8;
4365 case VFP_CPRC_VEC128:
4366 return 16;
4367 default:
4368 internal_error (_("Invalid VFP CPRC type: %d."),
4369 (int) b);
4370 }
4371}
4372
4373/* The character ('s', 'd' or 'q') for the type of VFP register used
4374 for passing base type B. */
4375
4376static int
4377arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
4378{
4379 switch (b)
4380 {
4381 case VFP_CPRC_SINGLE:
4382 return 's';
4383 case VFP_CPRC_DOUBLE:
4384 return 'd';
4385 case VFP_CPRC_VEC64:
4386 return 'd';
4387 case VFP_CPRC_VEC128:
4388 return 'q';
4389 default:
4390 internal_error (_("Invalid VFP CPRC type: %d."),
4391 (int) b);
4392 }
4393}
4394
4395/* Determine whether T may be part of a candidate for passing and
4396 returning in VFP registers, ignoring the limit on the total number
4397 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
4398 classification of the first valid component found; if it is not
4399 VFP_CPRC_UNKNOWN, all components must have the same classification
4400 as *BASE_TYPE. If it is found that T contains a type not permitted
4401 for passing and returning in VFP registers, a type differently
4402 classified from *BASE_TYPE, or two types differently classified
4403 from each other, return -1, otherwise return the total number of
4404 base-type elements found (possibly 0 in an empty structure or
4405 array). Vector types are not currently supported, matching the
4406 generic AAPCS support. */
4407
4408static int
4409arm_vfp_cprc_sub_candidate (struct type *t,
4410 enum arm_vfp_cprc_base_type *base_type)
4411{
4412 t = check_typedef (t);
4413 switch (t->code ())
4414 {
4415 case TYPE_CODE_FLT:
4416 switch (t->length ())
4417 {
4418 case 4:
4419 if (*base_type == VFP_CPRC_UNKNOWN)
4420 *base_type = VFP_CPRC_SINGLE;
4421 else if (*base_type != VFP_CPRC_SINGLE)
4422 return -1;
4423 return 1;
4424
4425 case 8:
4426 if (*base_type == VFP_CPRC_UNKNOWN)
4427 *base_type = VFP_CPRC_DOUBLE;
4428 else if (*base_type != VFP_CPRC_DOUBLE)
4429 return -1;
4430 return 1;
4431
4432 default:
4433 return -1;
4434 }
4435 break;
4436
4437 case TYPE_CODE_COMPLEX:
4438 /* Arguments of complex T where T is one of the types float or
4439 double get treated as if they are implemented as:
4440
4441 struct complexT
4442 {
4443 T real;
4444 T imag;
4445 };
4446
4447 */
4448 switch (t->length ())
4449 {
4450 case 8:
4451 if (*base_type == VFP_CPRC_UNKNOWN)
4452 *base_type = VFP_CPRC_SINGLE;
4453 else if (*base_type != VFP_CPRC_SINGLE)
4454 return -1;
4455 return 2;
4456
4457 case 16:
4458 if (*base_type == VFP_CPRC_UNKNOWN)
4459 *base_type = VFP_CPRC_DOUBLE;
4460 else if (*base_type != VFP_CPRC_DOUBLE)
4461 return -1;
4462 return 2;
4463
4464 default:
4465 return -1;
4466 }
4467 break;
4468
4469 case TYPE_CODE_ARRAY:
4470 {
4471 if (t->is_vector ())
4472 {
4473 /* A 64-bit or 128-bit containerized vector type is a VFP
4474 CPRC. */
4475 switch (t->length ())
4476 {
4477 case 8:
4478 if (*base_type == VFP_CPRC_UNKNOWN)
4479 *base_type = VFP_CPRC_VEC64;
4480 return 1;
4481 case 16:
4482 if (*base_type == VFP_CPRC_UNKNOWN)
4483 *base_type = VFP_CPRC_VEC128;
4484 return 1;
4485 default:
4486 return -1;
4487 }
4488 }
4489 else
4490 {
4491 int count;
4492 unsigned unitlen;
4493
4494 count = arm_vfp_cprc_sub_candidate (t->target_type (),
4495 base_type);
4496 if (count == -1)
4497 return -1;
4498 if (t->length () == 0)
4499 {
4500 gdb_assert (count == 0);
4501 return 0;
4502 }
4503 else if (count == 0)
4504 return -1;
4505 unitlen = arm_vfp_cprc_unit_length (*base_type);
4506 gdb_assert ((t->length () % unitlen) == 0);
4507 return t->length () / unitlen;
4508 }
4509 }
4510 break;
4511
4512 case TYPE_CODE_STRUCT:
4513 {
4514 int count = 0;
4515 unsigned unitlen;
4516 int i;
4517 for (i = 0; i < t->num_fields (); i++)
4518 {
4519 int sub_count = 0;
4520
4521 if (!field_is_static (&t->field (i)))
4522 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4523 base_type);
4524 if (sub_count == -1)
4525 return -1;
4526 count += sub_count;
4527 }
4528 if (t->length () == 0)
4529 {
4530 gdb_assert (count == 0);
4531 return 0;
4532 }
4533 else if (count == 0)
4534 return -1;
4535 unitlen = arm_vfp_cprc_unit_length (*base_type);
4536 if (t->length () != unitlen * count)
4537 return -1;
4538 return count;
4539 }
4540
4541 case TYPE_CODE_UNION:
4542 {
4543 int count = 0;
4544 unsigned unitlen;
4545 int i;
4546 for (i = 0; i < t->num_fields (); i++)
4547 {
4548 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4549 base_type);
4550 if (sub_count == -1)
4551 return -1;
4552 count = (count > sub_count ? count : sub_count);
4553 }
4554 if (t->length () == 0)
4555 {
4556 gdb_assert (count == 0);
4557 return 0;
4558 }
4559 else if (count == 0)
4560 return -1;
4561 unitlen = arm_vfp_cprc_unit_length (*base_type);
4562 if (t->length () != unitlen * count)
4563 return -1;
4564 return count;
4565 }
4566
4567 default:
4568 break;
4569 }
4570
4571 return -1;
4572}
4573
4574/* Determine whether T is a VFP co-processor register candidate (CPRC)
4575 if passed to or returned from a non-variadic function with the VFP
4576 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
4577 *BASE_TYPE to the base type for T and *COUNT to the number of
4578 elements of that base type before returning. */
4579
4580static int
4581arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
4582 int *count)
4583{
4584 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
4585 int c = arm_vfp_cprc_sub_candidate (t, &b);
4586 if (c <= 0 || c > 4)
4587 return 0;
4588 *base_type = b;
4589 *count = c;
4590 return 1;
4591}
4592
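/* For illustration only: classifications produced by
   arm_vfp_call_candidate under the rules above (a sketch, assuming the
   AAPCS VFP variant):

     struct { float x, y, z; }      -> VFP_CPRC_SINGLE, count 3
     struct { double re, im; }      -> VFP_CPRC_DOUBLE, count 2
     struct { float f; double d; }  -> mixed base types, not a CPRC
     struct { double d[5]; }        -> count 5 exceeds 4, not a CPRC  */
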
4593/* Return 1 if the VFP ABI should be used for passing arguments to and
4594 returning values from a function of type FUNC_TYPE, 0
4595 otherwise. */
4596
4597static int
4598arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
4599{
4600 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4601
4602 /* Variadic functions always use the base ABI. Assume that functions
4603 without debug info are not variadic. */
4604 if (func_type && check_typedef (func_type)->has_varargs ())
4605 return 0;
4606
4607 /* The VFP ABI is only supported as a variant of AAPCS. */
4608 if (tdep->arm_abi != ARM_ABI_AAPCS)
4609 return 0;
4610
4611 return tdep->fp_model == ARM_FLOAT_VFP;
4612}
4613
4614/* We currently only support passing parameters in integer registers, which
4615 conforms with GCC's default model, and VFP argument passing following
4616 the VFP variant of AAPCS. Several other variants exist and
4617 we should probably support some of them based on the selected ABI. */
4618
4619static CORE_ADDR
4620arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
4621 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
4622 struct value **args, CORE_ADDR sp,
4623 function_call_return_method return_method,
4624 CORE_ADDR struct_addr)
4625{
4626 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4627 int argnum;
4628 int argreg;
4629 int nstack;
4630 struct arm_stack_item *si = NULL;
4631 int use_vfp_abi;
4632 struct type *ftype;
4633 unsigned vfp_regs_free = (1 << 16) - 1;
4634 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4635
4636 /* Determine the type of this function and whether the VFP ABI
4637 applies. */
4638 ftype = check_typedef (function->type ());
4639 if (ftype->code () == TYPE_CODE_PTR)
4640 ftype = check_typedef (ftype->target_type ());
4641 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
4642
4643 /* Set the return address. For the ARM, the return breakpoint is
4644 always at BP_ADDR. */
4645 if (arm_pc_is_thumb (gdbarch, bp_addr))
4646 bp_addr |= 1;
4647 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
4648
4649 /* Walk through the list of args and determine how large a temporary
4650 stack is required. Need to take care here as structs may be
4651 passed on the stack, and we have to push them. */
4652 nstack = 0;
4653
4654 argreg = ARM_A1_REGNUM;
4655 nstack = 0;
4656
4657 /* The struct_return pointer occupies the first parameter
4658 passing register. */
4659 if (return_method == return_method_struct)
4660 {
4661 arm_debug_printf ("struct return in %s = %s",
4662 gdbarch_register_name (gdbarch, argreg),
4663 paddress (gdbarch, struct_addr));
4664
4665 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
4666 argreg++;
4667 }
4668
4669 for (argnum = 0; argnum < nargs; argnum++)
4670 {
4671 int len;
4672 struct type *arg_type;
4673 struct type *target_type;
4674 enum type_code typecode;
4675 const bfd_byte *val;
4676 int align;
4677 enum arm_vfp_cprc_base_type vfp_base_type;
4678 int vfp_base_count;
4679 int may_use_core_reg = 1;
4680
4681 arg_type = check_typedef (args[argnum]->type ());
4682 len = arg_type->length ();
4683 target_type = arg_type->target_type ();
4684 typecode = arg_type->code ();
4685 val = value_contents (args[argnum]).data ();
4686
4687 align = type_align (arg_type);
4688 /* Round alignment up to a whole number of words. */
4689 align = (align + ARM_INT_REGISTER_SIZE - 1)
4690 & ~(ARM_INT_REGISTER_SIZE - 1);
4691 /* Different ABIs have different maximum alignments. */
4692 if (tdep->arm_abi == ARM_ABI_APCS)
4693 {
4694 /* The APCS ABI only requires word alignment. */
4695 align = ARM_INT_REGISTER_SIZE;
4696 }
4697 else
4698 {
4699 /* The AAPCS requires at most doubleword alignment. */
4700 if (align > ARM_INT_REGISTER_SIZE * 2)
4701 align = ARM_INT_REGISTER_SIZE * 2;
4702 }
4703
4704 if (use_vfp_abi
4705 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
4706 &vfp_base_count))
4707 {
4708 int regno;
4709 int unit_length;
4710 int shift;
4711 unsigned mask;
4712
4713 /* Because this is a CPRC it cannot go in a core register or
4714 cause a core register to be skipped for alignment.
4715 Either it goes in VFP registers and the rest of this loop
4716 iteration is skipped for this argument, or it goes on the
4717 stack (and the stack alignment code is correct for this
4718 case). */
4719 may_use_core_reg = 0;
4720
4721 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
4722 shift = unit_length / 4;
4723 mask = (1 << (shift * vfp_base_count)) - 1;
4724 for (regno = 0; regno < 16; regno += shift)
4725 if (((vfp_regs_free >> regno) & mask) == mask)
4726 break;
4727
4728 if (regno < 16)
4729 {
4730 int reg_char;
4731 int reg_scaled;
4732 int i;
4733
4734 vfp_regs_free &= ~(mask << regno);
4735 reg_scaled = regno / shift;
4736 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
4737 for (i = 0; i < vfp_base_count; i++)
4738 {
4739 char name_buf[4];
4740 int regnum;
4741 if (reg_char == 'q')
4742 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
4743 val + i * unit_length);
4744 else
4745 {
4746 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
4747 reg_char, reg_scaled + i);
4748 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
4749 strlen (name_buf));
4750 regcache->cooked_write (regnum, val + i * unit_length);
4751 }
4752 }
4753 continue;
4754 }
4755 else
4756 {
4757 /* This CPRC could not go in VFP registers, so all VFP
4758 registers are now marked as used. */
4759 vfp_regs_free = 0;
4760 }
4761 }
4762
4763 /* Push stack padding for doubleword alignment. */
4764 if (nstack & (align - 1))
4765 {
4766 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
4767 nstack += ARM_INT_REGISTER_SIZE;
4768 }
4769
4770 /* Doubleword aligned quantities must go in even register pairs. */
4771 if (may_use_core_reg
4772 && argreg <= ARM_LAST_ARG_REGNUM
4773 && align > ARM_INT_REGISTER_SIZE
4774 && argreg & 1)
4775 argreg++;
4776
4777 /* If the argument is a pointer to a function, and it is a
4778 Thumb function, create a LOCAL copy of the value and set
4779 the THUMB bit in it. */
4780 if (TYPE_CODE_PTR == typecode
4781 && target_type != NULL
4782 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
4783 {
4784 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
4785 if (arm_pc_is_thumb (gdbarch, regval))
4786 {
4787 bfd_byte *copy = (bfd_byte *) alloca (len);
4788 store_unsigned_integer (copy, len, byte_order,
4789 MAKE_THUMB_ADDR (regval));
4790 val = copy;
4791 }
4792 }
4793
4794 /* Copy the argument to general registers or the stack in
4795 register-sized pieces. Large arguments are split between
4796 registers and stack. */
4797 while (len > 0)
4798 {
4799 int partial_len = len < ARM_INT_REGISTER_SIZE
4800 ? len : ARM_INT_REGISTER_SIZE;
4801 CORE_ADDR regval
4802 = extract_unsigned_integer (val, partial_len, byte_order);
4803
4804 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
4805 {
4806 /* The argument is being passed in a general purpose
4807 register. */
4808 if (byte_order == BFD_ENDIAN_BIG)
4809 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
4810
4811 arm_debug_printf ("arg %d in %s = 0x%s", argnum,
4812 gdbarch_register_name (gdbarch, argreg),
4813 phex (regval, ARM_INT_REGISTER_SIZE));
4814
4815 regcache_cooked_write_unsigned (regcache, argreg, regval);
4816 argreg++;
4817 }
4818 else
4819 {
4820 gdb_byte buf[ARM_INT_REGISTER_SIZE];
4821
4822 memset (buf, 0, sizeof (buf));
4823 store_unsigned_integer (buf, partial_len, byte_order, regval);
4824
4825 /* Push the arguments onto the stack. */
4826 arm_debug_printf ("arg %d @ sp + %d", argnum, nstack);
4827 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
4828 nstack += ARM_INT_REGISTER_SIZE;
4829 }
4830
4831 len -= partial_len;
4832 val += partial_len;
4833 }
4834 }
4835 /* If we have an odd number of words to push, then decrement the stack
4836 by one word now, so that the first stack argument will be dword aligned. */
4837 if (nstack & 4)
4838 sp -= 4;
4839
4840 while (si)
4841 {
4842 sp -= si->len;
4843 write_memory (sp, si->data, si->len);
4844 si = pop_stack_item (si);
4845 }
4846
4847 /* Finally, update the SP register. */
4848 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
4849
4850 return sp;
4851}
4852
4853
4854/* Always align the frame to an 8-byte boundary. This is required on
4855 some platforms and harmless on the rest. */
4856
4857static CORE_ADDR
4858arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
4859{
4860 /* Align the stack to eight bytes. */
4861 return sp & ~ (CORE_ADDR) 7;
4862}
4863
4864static void
4865print_fpu_flags (struct ui_file *file, int flags)
4866{
4867 if (flags & (1 << 0))
4868 gdb_puts ("IVO ", file);
4869 if (flags & (1 << 1))
4870 gdb_puts ("DVZ ", file);
4871 if (flags & (1 << 2))
4872 gdb_puts ("OFL ", file);
4873 if (flags & (1 << 3))
4874 gdb_puts ("UFL ", file);
4875 if (flags & (1 << 4))
4876 gdb_puts ("INX ", file);
4877 gdb_putc ('\n', file);
4878}
4879
4880/* Print interesting information about the floating point processor
4881 (if present) or emulator. */
4882static void
4883arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4884 frame_info_ptr frame, const char *args)
4885{
4886 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4887 int type;
4888
4889 type = (status >> 24) & 127;
4890 if (status & (1 << 31))
4891 gdb_printf (file, _("Hardware FPU type %d\n"), type);
4892 else
4893 gdb_printf (file, _("Software FPU type %d\n"), type);
4894 /* i18n: [floating point unit] mask */
4895 gdb_puts (_("mask: "), file);
4896 print_fpu_flags (file, status >> 16);
4897 /* i18n: [floating point unit] flags */
4898 gdb_puts (_("flags: "), file);
4899 print_fpu_flags (file, status);
4900}
4901
4902/* Construct the ARM extended floating point type. */
4903static struct type *
4904arm_ext_type (struct gdbarch *gdbarch)
4905{
4906 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4907
4908 if (!tdep->arm_ext_type)
4909 tdep->arm_ext_type
4910 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4911 floatformats_arm_ext);
4912
4913 return tdep->arm_ext_type;
4914}
4915
4916static struct type *
4917arm_neon_double_type (struct gdbarch *gdbarch)
4918{
4919 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4920
4921 if (tdep->neon_double_type == NULL)
4922 {
4923 struct type *t, *elem;
4924
4925 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4926 TYPE_CODE_UNION);
4927 elem = builtin_type (gdbarch)->builtin_uint8;
4928 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4929 elem = builtin_type (gdbarch)->builtin_uint16;
4930 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4931 elem = builtin_type (gdbarch)->builtin_uint32;
4932 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4933 elem = builtin_type (gdbarch)->builtin_uint64;
4934 append_composite_type_field (t, "u64", elem);
4935 elem = builtin_type (gdbarch)->builtin_float;
4936 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4937 elem = builtin_type (gdbarch)->builtin_double;
4938 append_composite_type_field (t, "f64", elem);
4939
4940 t->set_is_vector (true);
4941 t->set_name ("neon_d");
4942 tdep->neon_double_type = t;
4943 }
4944
4945 return tdep->neon_double_type;
4946}
4947
4948/* FIXME: The vector types are not correctly ordered on big-endian
4949 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4950 bits of d0 - regardless of what unit size is being held in d0. So
4951 the offset of the first uint8 in d0 is 7, but the offset of the
4952 first float is 4. This code works as-is for little-endian
4953 targets. */
4954
4955static struct type *
4956arm_neon_quad_type (struct gdbarch *gdbarch)
4957{
4958 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4959
4960 if (tdep->neon_quad_type == NULL)
4961 {
4962 struct type *t, *elem;
4963
4964 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4965 TYPE_CODE_UNION);
4966 elem = builtin_type (gdbarch)->builtin_uint8;
4967 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4968 elem = builtin_type (gdbarch)->builtin_uint16;
4969 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4970 elem = builtin_type (gdbarch)->builtin_uint32;
4971 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4972 elem = builtin_type (gdbarch)->builtin_uint64;
4973 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4974 elem = builtin_type (gdbarch)->builtin_float;
4975 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4976 elem = builtin_type (gdbarch)->builtin_double;
4977 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4978
4979 t->set_is_vector (true);
4980 t->set_name ("neon_q");
4981 tdep->neon_quad_type = t;
4982 }
4983
4984 return tdep->neon_quad_type;
4985}
4986
4987/* Return true if REGNUM is a Q pseudo register. Return false
4988 otherwise.
4989
4990 REGNUM is the raw register number and not a pseudo-relative register
4991 number. */
4992
4993static bool
4994is_q_pseudo (struct gdbarch *gdbarch, int regnum)
4995{
4996 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4997
4998 /* Q pseudo registers are available for both NEON (Q0~Q15) and
4999 MVE (Q0~Q7) features. */
5000 if (tdep->have_q_pseudos
5001 && regnum >= tdep->q_pseudo_base
5002 && regnum < (tdep->q_pseudo_base + tdep->q_pseudo_count))
5003 return true;
5004
5005 return false;
5006}
5007
5008/* Return true if REGNUM is a VFP S pseudo register. Return false
5009 otherwise.
5010
5011 REGNUM is the raw register number and not a pseudo-relative register
5012 number. */
5013
5014static bool
5015is_s_pseudo (struct gdbarch *gdbarch, int regnum)
5016{
5017 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5018
5019 if (tdep->have_s_pseudos
5020 && regnum >= tdep->s_pseudo_base
5021 && regnum < (tdep->s_pseudo_base + tdep->s_pseudo_count))
5022 return true;
5023
5024 return false;
5025}
5026
5027/* Return true if REGNUM is a MVE pseudo register (P0). Return false
5028 otherwise.
5029
5030 REGNUM is the raw register number and not a pseudo-relative register
5031 number. */
5032
5033static bool
5034is_mve_pseudo (struct gdbarch *gdbarch, int regnum)
5035{
5036 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5037
5038 if (tdep->have_mve
5039 && regnum >= tdep->mve_pseudo_base
5040 && regnum < tdep->mve_pseudo_base + tdep->mve_pseudo_count)
5041 return true;
5042
5043 return false;
5044}
5045
5046/* Return true if REGNUM is a PACBTI pseudo register (ra_auth_code). Return
5047 false otherwise.
5048
5049 REGNUM is the raw register number and not a pseudo-relative register
5050 number. */
5051
5052static bool
5053is_pacbti_pseudo (struct gdbarch *gdbarch, int regnum)
5054{
5055 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5056
5057 if (tdep->have_pacbti
5058 && regnum >= tdep->pacbti_pseudo_base
5059 && regnum < tdep->pacbti_pseudo_base + tdep->pacbti_pseudo_count)
5060 return true;
5061
5062 return false;
5063}
5064
5065/* Return the GDB type object for the "standard" data type of data in
5066 register N. */
5067
5068static struct type *
5069arm_register_type (struct gdbarch *gdbarch, int regnum)
5070{
5071 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5072
5073 if (is_s_pseudo (gdbarch, regnum))
5074 return builtin_type (gdbarch)->builtin_float;
5075
5076 if (is_q_pseudo (gdbarch, regnum))
5077 return arm_neon_quad_type (gdbarch);
5078
5079 if (is_mve_pseudo (gdbarch, regnum))
5080 return builtin_type (gdbarch)->builtin_int16;
5081
5082 if (is_pacbti_pseudo (gdbarch, regnum))
5083 return builtin_type (gdbarch)->builtin_uint32;
5084
5085 /* If the target description has register information, we are only
5086 in this function so that we can override the types of
5087 double-precision registers for NEON. */
5088 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
5089 {
5090 struct type *t = tdesc_register_type (gdbarch, regnum);
5091
5092 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
5093 && t->code () == TYPE_CODE_FLT
5094 && tdep->have_neon)
5095 return arm_neon_double_type (gdbarch);
5096 else
5097 return t;
5098 }
5099
5100 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
5101 {
5102 if (!tdep->have_fpa_registers)
5103 return builtin_type (gdbarch)->builtin_void;
5104
5105 return arm_ext_type (gdbarch);
5106 }
5107 else if (regnum == ARM_SP_REGNUM)
5108 return builtin_type (gdbarch)->builtin_data_ptr;
5109 else if (regnum == ARM_PC_REGNUM)
5110 return builtin_type (gdbarch)->builtin_func_ptr;
5111 else if (regnum >= ARRAY_SIZE (arm_register_names))
5112 /* These registers are only supported on targets which supply
5113 an XML description. */
5114 return builtin_type (gdbarch)->builtin_int0;
5115 else
5116 return builtin_type (gdbarch)->builtin_uint32;
5117}
5118
5119/* Map a DWARF register REGNUM onto the appropriate GDB register
5120 number. */
5121
5122static int
5123arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
5124{
5125 /* Core integer regs. */
5126 if (reg >= 0 && reg <= 15)
5127 return reg;
5128
5129 /* Legacy FPA encoding. These were once used in a way which
5130 overlapped with VFP register numbering, so their use is
5131 discouraged, but GDB doesn't support the ARM toolchain
5132 which used them for VFP. */
5133 if (reg >= 16 && reg <= 23)
5134 return ARM_F0_REGNUM + reg - 16;
5135
5136 /* New assignments for the FPA registers. */
5137 if (reg >= 96 && reg <= 103)
5138 return ARM_F0_REGNUM + reg - 96;
5139
5140 /* WMMX register assignments. */
5141 if (reg >= 104 && reg <= 111)
5142 return ARM_WCGR0_REGNUM + reg - 104;
5143
5144 if (reg >= 112 && reg <= 127)
5145 return ARM_WR0_REGNUM + reg - 112;
5146
5147 /* PACBTI register containing the Pointer Authentication Code. */
5148 if (reg == ARM_DWARF_RA_AUTH_CODE)
5149 {
5150 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5151
5152 if (tdep->have_pacbti)
5153 return tdep->pacbti_pseudo_base;
5154
5155 return -1;
5156 }
5157
5158 if (reg >= 192 && reg <= 199)
5159 return ARM_WC0_REGNUM + reg - 192;
5160
5161 /* VFP v2 registers. A double precision value is actually
5162 in d1 rather than s2, but the ABI only defines numbering
5163 for the single precision registers. This will "just work"
5164 in GDB for little endian targets (we'll read eight bytes,
5165 starting in s0 and then progressing to s1), but will be
5166 reversed on big endian targets with VFP. This won't
5167 be a problem for the new Neon quad registers; you're supposed
5168 to use DW_OP_piece for those. */
5169 if (reg >= 64 && reg <= 95)
5170 {
5171 char name_buf[4];
5172
5173 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
5174 return user_reg_map_name_to_regnum (gdbarch, name_buf,
5175 strlen (name_buf));
5176 }
5177
5178 /* VFP v3 / Neon registers. This range is also used for VFP v2
5179 registers, except that it now describes d0 instead of s0. */
5180 if (reg >= 256 && reg <= 287)
5181 {
5182 char name_buf[4];
5183
5184 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
5185 return user_reg_map_name_to_regnum (gdbarch, name_buf,
5186 strlen (name_buf));
5187 }
5188
5189 return -1;
5190}
5191
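/* For illustration only: sample mappings performed above (a sketch):

     DWARF 0..15             -> r0..r15, returned unchanged
     DWARF 64                -> regnum of "s0" (VFP v2 numbering)
     DWARF 256               -> regnum of "d0" (VFP v3 / Neon numbering)
     ARM_DWARF_RA_AUTH_CODE  -> PACBTI pseudo register, or -1 when PACBTI
                                is not available  */
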
5192/* Map GDB internal REGNUM onto the Arm simulator register numbers. */
5193static int
5194arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
5195{
5196 int reg = regnum;
5197 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
5198
5199 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
5200 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
5201
5202 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
5203 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
5204
5205 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
5206 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
5207
5208 if (reg < NUM_GREGS)
5209 return SIM_ARM_R0_REGNUM + reg;
5210 reg -= NUM_GREGS;
5211
5212 if (reg < NUM_FREGS)
5213 return SIM_ARM_FP0_REGNUM + reg;
5214 reg -= NUM_FREGS;
5215
5216 if (reg < NUM_SREGS)
5217 return SIM_ARM_FPS_REGNUM + reg;
5218 reg -= NUM_SREGS;
5219
5220 internal_error (_("Bad REGNUM %d"), regnum);
5221}
5222
5223static const unsigned char op_lit0 = DW_OP_lit0;
5224
5225static void
5226arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
5227 struct dwarf2_frame_state_reg *reg,
5228 frame_info_ptr this_frame)
5229{
5230 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5231
5232 if (is_pacbti_pseudo (gdbarch, regnum))
5233 {
5234 /* Initialize RA_AUTH_CODE to zero. */
5235 reg->how = DWARF2_FRAME_REG_SAVED_VAL_EXP;
5236 reg->loc.exp.start = &op_lit0;
5237 reg->loc.exp.len = 1;
5238 return;
5239 }
5240
5241 if (regnum == ARM_PC_REGNUM || regnum == ARM_PS_REGNUM)
5242 {
5243 reg->how = DWARF2_FRAME_REG_FN;
5244 reg->loc.fn = arm_dwarf2_prev_register;
5245 }
5246 else if (regnum == ARM_SP_REGNUM)
5247 reg->how = DWARF2_FRAME_REG_CFA;
5248 else if (arm_is_alternative_sp_register (tdep, regnum))
5249 {
5250 /* Handle the alternative SP registers on Cortex-M. */
5251 reg->how = DWARF2_FRAME_REG_FN;
5252 reg->loc.fn = arm_dwarf2_prev_register;
5253 }
5254}
5255
5256/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5257 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5258 NULL if an error occurs. BUF is freed. */
5259
5260static gdb_byte *
5261extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5262 int old_len, int new_len)
5263{
5264 gdb_byte *new_buf;
5265 int bytes_to_read = new_len - old_len;
5266
5267 new_buf = (gdb_byte *) xmalloc (new_len);
5268 memcpy (new_buf + bytes_to_read, buf, old_len);
5269 xfree (buf);
5270 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
5271 {
5272 xfree (new_buf);
5273 return NULL;
5274 }
5275 return new_buf;
5276}
5277
5278/* An IT block is at most the 2-byte IT instruction followed by
5279 four 4-byte instructions. The furthest back we must search to
5280 find an IT block that affects the current instruction is thus
5281 2 + 3 * 4 == 14 bytes. */
5282#define MAX_IT_BLOCK_PREFIX 14
5283
5284/* Use a quick scan if there are more than this many bytes of
5285 code. */
5286#define IT_SCAN_THRESHOLD 32
5287
5288/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5289 A breakpoint in an IT block may not be hit, depending on the
5290 condition flags. */
5291static CORE_ADDR
5292arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5293{
5294 gdb_byte *buf;
5295 char map_type;
5296 CORE_ADDR boundary, func_start;
5297 int buf_len;
5298 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5299 int i, any, last_it, last_it_count;
5300 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5301
5302 /* If we are using BKPT breakpoints, none of this is necessary. */
5303 if (tdep->thumb2_breakpoint == NULL)
5304 return bpaddr;
5305
5306 /* ARM mode does not have this problem. */
5307 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5308 return bpaddr;
5309
5310 /* We are setting a breakpoint in Thumb code that could potentially
5311 contain an IT block. The first step is to find how much Thumb
5312 code there is; we do not need to read outside of known Thumb
5313 sequences. */
5314 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5315 if (map_type == 0)
5316 /* Thumb-2 code must have mapping symbols to have a chance. */
5317 return bpaddr;
5318
5319 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5320
5321 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5322 && func_start > boundary)
5323 boundary = func_start;
5324
5325 /* Search for a candidate IT instruction. We have to do some fancy
5326 footwork to distinguish a real IT instruction from the second
5327 half of a 32-bit instruction, but there is no need for that if
5328 there's no candidate. */
5329 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
5330 if (buf_len == 0)
5331 /* No room for an IT instruction. */
5332 return bpaddr;
5333
5334 buf = (gdb_byte *) xmalloc (buf_len);
5335 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
5336 return bpaddr;
5337 any = 0;
5338 for (i = 0; i < buf_len; i += 2)
5339 {
5340 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5341 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5342 {
5343 any = 1;
5344 break;
5345 }
5346 }
5347
5348 if (any == 0)
5349 {
5350 xfree (buf);
5351 return bpaddr;
5352 }
5353
5354 /* OK, the code bytes before this instruction contain at least one
5355 halfword which resembles an IT instruction. We know that it's
5356 Thumb code, but there are still two possibilities. Either the
5357 halfword really is an IT instruction, or it is the second half of
5358 a 32-bit Thumb instruction. The only way we can tell is to
5359 scan forwards from a known instruction boundary. */
5360 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5361 {
5362 int definite;
5363
5364 /* There's a lot of code before this instruction. Start with an
5365 optimistic search; it's easy to recognize halfwords that
5366 cannot be the start of a 32-bit instruction, and use that to
5367 lock on to the instruction boundaries. */
5368 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5369 if (buf == NULL)
5370 return bpaddr;
5371 buf_len = IT_SCAN_THRESHOLD;
5372
5373 definite = 0;
5374      for (i = 0; i < buf_len - MAX_IT_BLOCK_PREFIX && ! definite; i += 2)
5375 {
5376 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5377 if (thumb_insn_size (inst1) == 2)
5378 {
5379 definite = 1;
5380 break;
5381 }
5382 }
5383
5384 /* At this point, if DEFINITE, BUF[I] is the first place we
5385 are sure that we know the instruction boundaries, and it is far
5386 enough from BPADDR that we could not miss an IT instruction
5387 affecting BPADDR. If ! DEFINITE, give up - start from a
5388 known boundary. */
5389 if (! definite)
5390 {
5391 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5392 bpaddr - boundary);
5393 if (buf == NULL)
5394 return bpaddr;
5395 buf_len = bpaddr - boundary;
5396 i = 0;
5397 }
5398 }
5399 else
5400 {
5401 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5402 if (buf == NULL)
5403 return bpaddr;
5404 buf_len = bpaddr - boundary;
5405 i = 0;
5406 }
5407
5408 /* Scan forwards. Find the last IT instruction before BPADDR. */
5409 last_it = -1;
5410 last_it_count = 0;
5411 while (i < buf_len)
5412 {
5413 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5414 last_it_count--;
5415 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5416 {
5417 last_it = i;
5418 if (inst1 & 0x0001)
5419 last_it_count = 4;
5420 else if (inst1 & 0x0002)
5421 last_it_count = 3;
5422 else if (inst1 & 0x0004)
5423 last_it_count = 2;
5424 else
5425 last_it_count = 1;
5426 }
5427 i += thumb_insn_size (inst1);
5428 }
5429
5430 xfree (buf);
5431
5432 if (last_it == -1)
5433 /* There wasn't really an IT instruction after all. */
5434 return bpaddr;
5435
5436 if (last_it_count < 1)
5437 /* It was too far away. */
5438 return bpaddr;
5439
5440 /* This really is a trouble spot. Move the breakpoint to the IT
5441 instruction. */
5442 return bpaddr - buf_len + last_it;
5443}
5444
5445/* ARM displaced stepping support.
5446
5447 Generally ARM displaced stepping works as follows:
5448
5449 1. When an instruction is to be single-stepped, it is first decoded by
5450 arm_process_displaced_insn. Depending on the type of instruction, it is
5451 then copied to a scratch location, possibly in a modified form. The
5452 copy_* set of functions performs such modification, as necessary. A
5453 breakpoint is placed after the modified instruction in the scratch space
5454 to return control to GDB. Note in particular that instructions which
5455 modify the PC will no longer do so after modification.
5456
5457 2. The instruction is single-stepped, by setting the PC to the scratch
5458 location address, and resuming. Control returns to GDB when the
5459 breakpoint is hit.
5460
5461 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5462 function used for the current instruction. This function's job is to
5463 put the CPU/memory state back to what it would have been if the
5464 instruction had been executed unmodified in its original location. */
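
/* For example, a PC-relative load such as "ldr r0, [pc, #8]" would read from
   the wrong address if executed unmodified from the scratch pad.  The
   corresponding copy_* routine therefore materializes the original PC value
   in a scratch register first (see e.g. thumb2_copy_load_literal below), and
   the cleanup_* routine restores the scratch registers afterwards.  */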
5465
5466/* NOP instruction (mov r0, r0). */
5467#define ARM_NOP 0xe1a00000
5468#define THUMB_NOP 0x4600
5469
5470/* Helper for register reads for displaced stepping. In particular, this
5471 returns the PC as it would be seen by the instruction at its original
5472 location. */
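
/* For example, for an ARM-mode instruction originally at address 0x8000,
   reading r15 through this helper yields 0x8008 (0x8004 for Thumb), matching
   what the unmodified instruction would have observed in place.  */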
5473
5474ULONGEST
5475displaced_read_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5476 int regno)
5477{
5478 ULONGEST ret;
5479 CORE_ADDR from = dsc->insn_addr;
5480
5481 if (regno == ARM_PC_REGNUM)
5482 {
5483 /* Compute pipeline offset:
5484 - When executing an ARM instruction, PC reads as the address of the
5485 current instruction plus 8.
5486 - When executing a Thumb instruction, PC reads as the address of the
5487 current instruction plus 4. */
5488
5489 if (!dsc->is_thumb)
5490 from += 8;
5491 else
5492 from += 4;
5493
5494 displaced_debug_printf ("read pc value %.8lx",
5495 (unsigned long) from);
5496 return (ULONGEST) from;
5497 }
5498 else
5499 {
5500 regcache_cooked_read_unsigned (regs, regno, &ret);
5501
5502 displaced_debug_printf ("read r%d value %.8lx",
5503 regno, (unsigned long) ret);
5504
5505 return ret;
5506 }
5507}
5508
5509static int
5510displaced_in_arm_mode (struct regcache *regs)
5511{
5512 ULONGEST ps;
5513 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5514
5515 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5516
5517 return (ps & t_bit) == 0;
5518}
5519
5520/* Write to the PC as from a branch instruction. */
5521
5522static void
5523branch_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5524 ULONGEST val)
5525{
5526 if (!dsc->is_thumb)
5527 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5528 architecture versions < 6. */
5529 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5530 val & ~(ULONGEST) 0x3);
5531 else
5532 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5533 val & ~(ULONGEST) 0x1);
5534}
5535
5536/* Write to the PC as from a branch-exchange instruction. */
5537
5538static void
5539bx_write_pc (struct regcache *regs, ULONGEST val)
5540{
5541 ULONGEST ps;
5542 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5543
5544 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5545
5546 if ((val & 1) == 1)
5547 {
5548 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5549 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5550 }
5551 else if ((val & 2) == 0)
5552 {
5553 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5554 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5555 }
5556 else
5557 {
5558 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5559 mode, align dest to 4 bytes). */
5560 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5561 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5562 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5563 }
5564}
5565
5566/* Write to the PC as if from a load instruction. */
5567
5568static void
5569load_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5570 ULONGEST val)
5571{
5572 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5573 bx_write_pc (regs, val);
5574 else
5575 branch_write_pc (regs, dsc, val);
5576}
5577
5578/* Write to the PC as if from an ALU instruction. */
5579
5580static void
5581alu_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5582 ULONGEST val)
5583{
5584 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5585 bx_write_pc (regs, val);
5586 else
5587 branch_write_pc (regs, dsc, val);
5588}
5589
5590/* Helper for writing to registers for displaced stepping. Writing to the PC
5591   has varying effects depending on the instruction that does the write:
5592 this is controlled by the WRITE_PC argument. */
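
/* For example, on ARMv5T and later a load into the PC behaves like BX (bit 0
   of the loaded value selects Thumb state), so LOAD_WRITE_PC goes through
   bx_write_pc above, whereas ALU writes to the PC only interwork from ARMv7
   onwards (and only in ARM state), which is what ALU_WRITE_PC models.  */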
5593
5594void
5595displaced_write_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5596 int regno, ULONGEST val, enum pc_write_style write_pc)
5597{
5598 if (regno == ARM_PC_REGNUM)
5599 {
5600 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val);
5601
5602 switch (write_pc)
5603 {
5604 case BRANCH_WRITE_PC:
5605 branch_write_pc (regs, dsc, val);
5606 break;
5607
5608 case BX_WRITE_PC:
5609 bx_write_pc (regs, val);
5610 break;
5611
5612 case LOAD_WRITE_PC:
5613 load_write_pc (regs, dsc, val);
5614 break;
5615
5616 case ALU_WRITE_PC:
5617 alu_write_pc (regs, dsc, val);
5618 break;
5619
5620 case CANNOT_WRITE_PC:
5621 warning (_("Instruction wrote to PC in an unexpected way when "
5622 "single-stepping"));
5623 break;
5624
5625 default:
5626 internal_error (_("Invalid argument to displaced_write_reg"));
5627 }
5628
5629 dsc->wrote_to_pc = 1;
5630 }
5631 else
5632 {
5633 displaced_debug_printf ("writing r%d value %.8lx",
5634 regno, (unsigned long) val);
5635 regcache_cooked_write_unsigned (regs, regno, val);
5636 }
5637}
5638
5639/* This function is used to concisely determine if an instruction INSN
5640 references PC. Register fields of interest in INSN should have the
5641 corresponding fields of BITMASK set to 0b1111. The function
5642   returns 1 if any of these fields in INSN reference the PC
5643 (also 0b1111, r15), else it returns 0. */
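
/* For example, insn_references_pc (insn, 0x000f0000ul) checks only the Rn
   field (bits 16..19) of a data-processing instruction, and returns 1 only
   when that field contains 0b1111 (r15).  */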
5644
5645static int
5646insn_references_pc (uint32_t insn, uint32_t bitmask)
5647{
5648 uint32_t lowbit = 1;
5649
5650 while (bitmask != 0)
5651 {
5652 uint32_t mask;
5653
5654 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5655 ;
5656
5657 if (!lowbit)
5658 break;
5659
5660 mask = lowbit * 0xf;
5661
5662 if ((insn & mask) == mask)
5663 return 1;
5664
5665 bitmask &= ~mask;
5666 }
5667
5668 return 0;
5669}
5670
5671/* The simplest copy function. Many instructions have the same effect no
5672 matter what address they are executed at: in those cases, use this. */
5673
5674static int
5675arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn, const char *iname,
5676 arm_displaced_step_copy_insn_closure *dsc)
5677{
5678 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
5679 (unsigned long) insn, iname);
5680
5681 dsc->modinsn[0] = insn;
5682
5683 return 0;
5684}
5685
5686static int
5687thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5688 uint16_t insn2, const char *iname,
5689 arm_displaced_step_copy_insn_closure *dsc)
5690{
5691 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
5692 "unmodified", insn1, insn2, iname);
5693
5694 dsc->modinsn[0] = insn1;
5695 dsc->modinsn[1] = insn2;
5696 dsc->numinsns = 2;
5697
5698 return 0;
5699}
5700
5701/* Copy a 16-bit Thumb instruction (plain Thumb or 16-bit Thumb-2) without
5702   any modification.  */
5703static int
5704thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
5705 const char *iname,
5706 arm_displaced_step_copy_insn_closure *dsc)
5707{
5708 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
5709 insn, iname);
5710
5711 dsc->modinsn[0] = insn;
5712
5713 return 0;
5714}
5715
5716/* Preload instructions with immediate offset. */
5717
5718static void
5719cleanup_preload (struct gdbarch *gdbarch, regcache *regs,
5720 arm_displaced_step_copy_insn_closure *dsc)
5721{
5722 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5723 if (!dsc->u.preload.immed)
5724 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5725}
5726
5727static void
5728install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5729 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn)
5730{
5731 ULONGEST rn_val;
5732 /* Preload instructions:
5733
5734 {pli/pld} [rn, #+/-imm]
5735 ->
5736 {pli/pld} [r0, #+/-imm]. */
5737
5738 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5739 rn_val = displaced_read_reg (regs, dsc, rn);
5740 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5741 dsc->u.preload.immed = 1;
5742
5743 dsc->cleanup = &cleanup_preload;
5744}
5745
5746static int
5747arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5748 arm_displaced_step_copy_insn_closure *dsc)
5749{
5750 unsigned int rn = bits (insn, 16, 19);
5751
5752 if (!insn_references_pc (insn, 0x000f0000ul))
5753 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5754
5755 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn);
5756
5757 dsc->modinsn[0] = insn & 0xfff0ffff;
5758
5759 install_preload (gdbarch, regs, dsc, rn);
5760
5761 return 0;
5762}
5763
5764static int
5765thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5766 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5767{
5768 unsigned int rn = bits (insn1, 0, 3);
5769 unsigned int u_bit = bit (insn1, 7);
5770 int imm12 = bits (insn2, 0, 11);
5771 ULONGEST pc_val;
5772
5773 if (rn != ARM_PC_REGNUM)
5774 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5775
5776  /* The PC may only be used here by PLI (immediate, literal) encoding T3
5777     and PLD (literal) encoding T1.  */
5778 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
5779 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5780 imm12);
5781
5782 if (!u_bit)
5783 imm12 = -1 * imm12;
5784
5785 /* Rewrite instruction {pli/pld} PC imm12 into:
5786 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5787
5788 {pli/pld} [r0, r1]
5789
5790 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5791
5792 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5793 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5794
5795 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5796
5797 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5798 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5799 dsc->u.preload.immed = 0;
5800
5801 /* {pli/pld} [r0, r1] */
5802 dsc->modinsn[0] = insn1 & 0xfff0;
5803 dsc->modinsn[1] = 0xf001;
5804 dsc->numinsns = 2;
5805
5806 dsc->cleanup = &cleanup_preload;
5807 return 0;
5808}
5809
5810/* Preload instructions with register offset. */
5811
5812static void
5813install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5814 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn,
5815 unsigned int rm)
5816{
5817 ULONGEST rn_val, rm_val;
5818
5819 /* Preload register-offset instructions:
5820
5821 {pli/pld} [rn, rm {, shift}]
5822 ->
5823 {pli/pld} [r0, r1 {, shift}]. */
5824
5825 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5826 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5827 rn_val = displaced_read_reg (regs, dsc, rn);
5828 rm_val = displaced_read_reg (regs, dsc, rm);
5829 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5830 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5831 dsc->u.preload.immed = 0;
5832
5833 dsc->cleanup = &cleanup_preload;
5834}
5835
5836static int
5837arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5838 struct regcache *regs,
5839 arm_displaced_step_copy_insn_closure *dsc)
5840{
5841 unsigned int rn = bits (insn, 16, 19);
5842 unsigned int rm = bits (insn, 0, 3);
5843
5844
5845 if (!insn_references_pc (insn, 0x000f000ful))
5846 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5847
5848 displaced_debug_printf ("copying preload insn %.8lx",
5849 (unsigned long) insn);
5850
5851 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5852
5853 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5854 return 0;
5855}
5856
5857/* Copy/cleanup coprocessor load and store instructions. */
5858
5859static void
5860cleanup_copro_load_store (struct gdbarch *gdbarch,
5861 struct regcache *regs,
5862 arm_displaced_step_copy_insn_closure *dsc)
5863{
5864 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5865
5866 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5867
5868 if (dsc->u.ldst.writeback)
5869 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5870}
5871
5872static void
5873install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5874 arm_displaced_step_copy_insn_closure *dsc,
5875 int writeback, unsigned int rn)
5876{
5877 ULONGEST rn_val;
5878
5879 /* Coprocessor load/store instructions:
5880
5881 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5882 ->
5883 {stc/stc2} [r0, #+/-imm].
5884
5885 ldc/ldc2 are handled identically. */
5886
5887 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5888 rn_val = displaced_read_reg (regs, dsc, rn);
5889 /* PC should be 4-byte aligned. */
5890 rn_val = rn_val & 0xfffffffc;
5891 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5892
5893 dsc->u.ldst.writeback = writeback;
5894 dsc->u.ldst.rn = rn;
5895
5896 dsc->cleanup = &cleanup_copro_load_store;
5897}
5898
5899static int
5900arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5901 struct regcache *regs,
5902 arm_displaced_step_copy_insn_closure *dsc)
5903{
5904 unsigned int rn = bits (insn, 16, 19);
5905
5906 if (!insn_references_pc (insn, 0x000f0000ul))
5907 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5908
5909 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
5910 (unsigned long) insn);
5911
5912 dsc->modinsn[0] = insn & 0xfff0ffff;
5913
5914 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5915
5916 return 0;
5917}
5918
5919static int
5920thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5921 uint16_t insn2, struct regcache *regs,
5922 arm_displaced_step_copy_insn_closure *dsc)
5923{
5924 unsigned int rn = bits (insn1, 0, 3);
5925
5926 if (rn != ARM_PC_REGNUM)
5927 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5928 "copro load/store", dsc);
5929
5930 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
5931 insn1, insn2);
5932
5933 dsc->modinsn[0] = insn1 & 0xfff0;
5934 dsc->modinsn[1] = insn2;
5935 dsc->numinsns = 2;
5936
5937  /* This function is called to copy an LDC/LDC2/VLDR instruction, whose
5938     PC-relative form does not support writeback, so pass 0.  */
5939 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5940
5941 return 0;
5942}
5943
5944/* Clean up branch instructions (actually perform the branch, by setting
5945 PC). */
5946
5947static void
5948cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5949 arm_displaced_step_copy_insn_closure *dsc)
5950{
5951 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5952 int branch_taken = condition_true (dsc->u.branch.cond, status);
5953 enum pc_write_style write_pc = dsc->u.branch.exchange
5954 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5955
5956 if (!branch_taken)
5957 return;
5958
5959 if (dsc->u.branch.link)
5960 {
5961      /* The value of LR should be the address of the insn following this one.
5962	 In order not to confuse the logic that later handles a `bx lr' insn,
5963	 if the current insn is Thumb, bit 0 of the LR value should be set to 1.  */
5964 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5965
5966 if (dsc->is_thumb)
5967 next_insn_addr |= 0x1;
5968
5969 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5970 CANNOT_WRITE_PC);
5971 }
5972
5973 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5974}
5975
5976/* Copy B/BL/BLX instructions with immediate destinations. */
5977
5978static void
5979install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5980 arm_displaced_step_copy_insn_closure *dsc,
5981 unsigned int cond, int exchange, int link, long offset)
5982{
5983 /* Implement "BL<cond> <label>" as:
5984
5985 Preparation: cond <- instruction condition
5986 Insn: mov r0, r0 (nop)
5987 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5988
5989 B<cond> similar, but don't set r14 in cleanup. */
5990
5991 dsc->u.branch.cond = cond;
5992 dsc->u.branch.link = link;
5993 dsc->u.branch.exchange = exchange;
5994
5995 dsc->u.branch.dest = dsc->insn_addr;
5996 if (link && exchange)
5997    /* For BLX, the offset is computed from Align (PC, 4).  */
5998 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5999
6000 if (dsc->is_thumb)
6001 dsc->u.branch.dest += 4 + offset;
6002 else
6003 dsc->u.branch.dest += 8 + offset;
6004
6005 dsc->cleanup = &cleanup_branch;
6006}
6007static int
6008arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6009 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6010{
6011 unsigned int cond = bits (insn, 28, 31);
6012 int exchange = (cond == 0xf);
6013 int link = exchange || bit (insn, 24);
6014 long offset;
6015
6016 displaced_debug_printf ("copying %s immediate insn %.8lx",
6017 (exchange) ? "blx" : (link) ? "bl" : "b",
6018 (unsigned long) insn);
6019 if (exchange)
6020 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6021 then arrange the switch into Thumb mode. */
6022 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6023 else
6024 offset = bits (insn, 0, 23) << 2;
6025
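  /* The 24-bit immediate, shifted left, occupies bits 2..25 of OFFSET (bit 1
     as well for BLX), so bit 25 is the sign bit of the 26-bit branch offset;
     sign-extend it to the full word.  */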
6026 if (bit (offset, 25))
6027 offset = offset | ~0x3ffffff;
6028
6029 dsc->modinsn[0] = ARM_NOP;
6030
6031 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6032 return 0;
6033}
6034
6035static int
6036thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6037 uint16_t insn2, struct regcache *regs,
6038 arm_displaced_step_copy_insn_closure *dsc)
6039{
6040 int link = bit (insn2, 14);
6041 int exchange = link && !bit (insn2, 12);
6042 int cond = INST_AL;
6043 long offset = 0;
6044 int j1 = bit (insn2, 13);
6045 int j2 = bit (insn2, 11);
6046 int s = sbits (insn1, 10, 10);
6047 int i1 = !(j1 ^ bit (insn1, 10));
6048 int i2 = !(j2 ^ bit (insn1, 10));
6049
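  /* In the encoding T4/BL/BLX forms, I1 = NOT (J1 XOR S) and
     I2 = NOT (J2 XOR S); together with S they supply the top bits of the
     branch offset assembled below (encoding T3 uses J1/J2 directly).  */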
6050 if (!link && !exchange) /* B */
6051 {
6052 offset = (bits (insn2, 0, 10) << 1);
6053 if (bit (insn2, 12)) /* Encoding T4 */
6054 {
6055 offset |= (bits (insn1, 0, 9) << 12)
6056 | (i2 << 22)
6057 | (i1 << 23)
6058 | (s << 24);
6059 cond = INST_AL;
6060 }
6061 else /* Encoding T3 */
6062 {
6063 offset |= (bits (insn1, 0, 5) << 12)
6064 | (j1 << 18)
6065 | (j2 << 19)
6066 | (s << 20);
6067 cond = bits (insn1, 6, 9);
6068 }
6069 }
6070 else
6071 {
6072 offset = (bits (insn1, 0, 9) << 12);
6073 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6074 offset |= exchange ?
6075 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6076 }
6077
6078 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
6079 link ? (exchange) ? "blx" : "bl" : "b",
6080 insn1, insn2, offset);
6081
6082 dsc->modinsn[0] = THUMB_NOP;
6083
6084 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6085 return 0;
6086}
6087
6088/* Copy B Thumb instructions. */
6089static int
6090thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
6091 arm_displaced_step_copy_insn_closure *dsc)
6092{
6093 unsigned int cond = 0;
6094 int offset = 0;
6095 unsigned short bit_12_15 = bits (insn, 12, 15);
6096 CORE_ADDR from = dsc->insn_addr;
6097
6098 if (bit_12_15 == 0xd)
6099 {
6100 /* offset = SignExtend (imm8:0, 32) */
6101 offset = sbits ((insn << 1), 0, 8);
6102 cond = bits (insn, 8, 11);
6103 }
6104 else if (bit_12_15 == 0xe) /* Encoding T2 */
6105 {
6106 offset = sbits ((insn << 1), 0, 11);
6107 cond = INST_AL;
6108 }
6109
6110 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
6111 insn, offset);
6112
6113 dsc->u.branch.cond = cond;
6114 dsc->u.branch.link = 0;
6115 dsc->u.branch.exchange = 0;
6116 dsc->u.branch.dest = from + 4 + offset;
6117
6118 dsc->modinsn[0] = THUMB_NOP;
6119
6120 dsc->cleanup = &cleanup_branch;
6121
6122 return 0;
6123}
6124
6125/* Copy BX/BLX with register-specified destinations. */
6126
6127static void
6128install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6129 arm_displaced_step_copy_insn_closure *dsc, int link,
6130 unsigned int cond, unsigned int rm)
6131{
6132  /* Implement "{BX,BLX}<cond> <reg>" as:
6133
6134 Preparation: cond <- instruction condition
6135 Insn: mov r0, r0 (nop)
6136 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6137
6138 Don't set r14 in cleanup for BX. */
6139
6140 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6141
6142 dsc->u.branch.cond = cond;
6143 dsc->u.branch.link = link;
6144
6145 dsc->u.branch.exchange = 1;
6146
6147 dsc->cleanup = &cleanup_branch;
6148}
6149
6150static int
6151arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6152 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6153{
6154 unsigned int cond = bits (insn, 28, 31);
6155 /* BX: x12xxx1x
6156 BLX: x12xxx3x. */
6157 int link = bit (insn, 5);
6158 unsigned int rm = bits (insn, 0, 3);
6159
6160 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn);
6161
6162 dsc->modinsn[0] = ARM_NOP;
6163
6164 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6165 return 0;
6166}
6167
6168static int
6169thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6170 struct regcache *regs,
6171 arm_displaced_step_copy_insn_closure *dsc)
6172{
6173 int link = bit (insn, 7);
6174 unsigned int rm = bits (insn, 3, 6);
6175
6176 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn);
6177
6178 dsc->modinsn[0] = THUMB_NOP;
6179
6180 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6181
6182 return 0;
6183}
6184
6185
6186/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6187
6188static void
6189cleanup_alu_imm (struct gdbarch *gdbarch,
6190 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6191{
6192 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6193 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6194 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6195 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6196}
6197
6198static int
6199arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6200 arm_displaced_step_copy_insn_closure *dsc)
6201{
6202 unsigned int rn = bits (insn, 16, 19);
6203 unsigned int rd = bits (insn, 12, 15);
6204 unsigned int op = bits (insn, 21, 24);
6205 int is_mov = (op == 0xd);
6206 ULONGEST rd_val, rn_val;
6207
6208 if (!insn_references_pc (insn, 0x000ff000ul))
6209 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6210
6211 displaced_debug_printf ("copying immediate %s insn %.8lx",
6212 is_mov ? "move" : "ALU",
6213 (unsigned long) insn);
6214
6215 /* Instruction is of form:
6216
6217 <op><cond> rd, [rn,] #imm
6218
6219 Rewrite as:
6220
6221 Preparation: tmp1, tmp2 <- r0, r1;
6222 r0, r1 <- rd, rn
6223 Insn: <op><cond> r0, r1, #imm
6224 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6225 */
6226
6227 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6228 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6229 rn_val = displaced_read_reg (regs, dsc, rn);
6230 rd_val = displaced_read_reg (regs, dsc, rd);
6231 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6232 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6233 dsc->rd = rd;
6234
6235 if (is_mov)
6236 dsc->modinsn[0] = insn & 0xfff00fff;
6237 else
6238 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6239
6240 dsc->cleanup = &cleanup_alu_imm;
6241
6242 return 0;
6243}
6244
6245static int
6246thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6247 uint16_t insn2, struct regcache *regs,
6248 arm_displaced_step_copy_insn_closure *dsc)
6249{
6250 unsigned int op = bits (insn1, 5, 8);
6251 unsigned int rn, rm, rd;
6252 ULONGEST rd_val, rn_val;
6253
6254 rn = bits (insn1, 0, 3); /* Rn */
6255 rm = bits (insn2, 0, 3); /* Rm */
6256 rd = bits (insn2, 8, 11); /* Rd */
6257
6258  /* This routine is only called for the MOV instruction.  */
6259 gdb_assert (op == 0x2 && rn == 0xf);
6260
6261 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6262 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6263
6264 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1, insn2);
6265
6266 /* Instruction is of form:
6267
6268 <op><cond> rd, [rn,] #imm
6269
6270 Rewrite as:
6271
6272 Preparation: tmp1, tmp2 <- r0, r1;
6273 r0, r1 <- rd, rn
6274 Insn: <op><cond> r0, r1, #imm
6275 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6276 */
6277
6278 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6279 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6280 rn_val = displaced_read_reg (regs, dsc, rn);
6281 rd_val = displaced_read_reg (regs, dsc, rd);
6282 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6283 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6284 dsc->rd = rd;
6285
6286 dsc->modinsn[0] = insn1;
6287 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6288 dsc->numinsns = 2;
6289
6290 dsc->cleanup = &cleanup_alu_imm;
6291
6292 return 0;
6293}
6294
6295/* Copy/cleanup arithmetic/logic insns with register RHS. */
6296
6297static void
6298cleanup_alu_reg (struct gdbarch *gdbarch,
6299 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6300{
6301 ULONGEST rd_val;
6302 int i;
6303
6304 rd_val = displaced_read_reg (regs, dsc, 0);
6305
6306 for (i = 0; i < 3; i++)
6307 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6308
6309 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6310}
6311
6312static void
6313install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6314 arm_displaced_step_copy_insn_closure *dsc,
6315 unsigned int rd, unsigned int rn, unsigned int rm)
6316{
6317 ULONGEST rd_val, rn_val, rm_val;
6318
6319 /* Instruction is of form:
6320
6321 <op><cond> rd, [rn,] rm [, <shift>]
6322
6323 Rewrite as:
6324
6325 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6326 r0, r1, r2 <- rd, rn, rm
6327 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
6328 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6329 */
6330
6331 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6332 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6333 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6334 rd_val = displaced_read_reg (regs, dsc, rd);
6335 rn_val = displaced_read_reg (regs, dsc, rn);
6336 rm_val = displaced_read_reg (regs, dsc, rm);
6337 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6338 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6339 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6340 dsc->rd = rd;
6341
6342 dsc->cleanup = &cleanup_alu_reg;
6343}
6344
6345static int
6346arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6347 arm_displaced_step_copy_insn_closure *dsc)
6348{
6349 unsigned int op = bits (insn, 21, 24);
6350 int is_mov = (op == 0xd);
6351
6352 if (!insn_references_pc (insn, 0x000ff00ful))
6353 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6354
6355 displaced_debug_printf ("copying reg %s insn %.8lx",
6356 is_mov ? "move" : "ALU", (unsigned long) insn);
6357
6358 if (is_mov)
6359 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6360 else
6361 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6362
6363 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6364 bits (insn, 0, 3));
6365 return 0;
6366}
6367
6368static int
6369thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6370 struct regcache *regs,
6371 arm_displaced_step_copy_insn_closure *dsc)
6372{
6373 unsigned rm, rd;
6374
6375 rm = bits (insn, 3, 6);
6376 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6377
6378 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
6379 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6380
6381 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn);
6382
6383 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
6384
6385 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
6386
6387 return 0;
6388}
6389
6390/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6391
6392static void
6393cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6394 struct regcache *regs,
6395 arm_displaced_step_copy_insn_closure *dsc)
6396{
6397 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6398 int i;
6399
6400 for (i = 0; i < 4; i++)
6401 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6402
6403 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6404}
6405
6406static void
6407install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6408 arm_displaced_step_copy_insn_closure *dsc,
6409 unsigned int rd, unsigned int rn, unsigned int rm,
6410 unsigned rs)
6411{
6412 int i;
6413 ULONGEST rd_val, rn_val, rm_val, rs_val;
6414
6415 /* Instruction is of form:
6416
6417 <op><cond> rd, [rn,] rm, <shift> rs
6418
6419 Rewrite as:
6420
6421 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6422 r0, r1, r2, r3 <- rd, rn, rm, rs
6423 Insn: <op><cond> r0, r1, r2, <shift> r3
6424 Cleanup: tmp5 <- r0
6425 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6426 rd <- tmp5
6427 */
6428
6429 for (i = 0; i < 4; i++)
6430 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6431
6432 rd_val = displaced_read_reg (regs, dsc, rd);
6433 rn_val = displaced_read_reg (regs, dsc, rn);
6434 rm_val = displaced_read_reg (regs, dsc, rm);
6435 rs_val = displaced_read_reg (regs, dsc, rs);
6436 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6437 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6438 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6439 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6440 dsc->rd = rd;
6441 dsc->cleanup = &cleanup_alu_shifted_reg;
6442}
6443
6444static int
6445arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6446 struct regcache *regs,
6447 arm_displaced_step_copy_insn_closure *dsc)
6448{
6449 unsigned int op = bits (insn, 21, 24);
6450 int is_mov = (op == 0xd);
6451 unsigned int rd, rn, rm, rs;
6452
6453 if (!insn_references_pc (insn, 0x000fff0ful))
6454 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6455
6456 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
6457 is_mov ? "move" : "ALU",
6458 (unsigned long) insn);
6459
6460 rn = bits (insn, 16, 19);
6461 rm = bits (insn, 0, 3);
6462 rs = bits (insn, 8, 11);
6463 rd = bits (insn, 12, 15);
6464
6465 if (is_mov)
6466 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6467 else
6468 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6469
6470 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6471
6472 return 0;
6473}
6474
6475/* Clean up load instructions. */
6476
6477static void
6478cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6479 arm_displaced_step_copy_insn_closure *dsc)
6480{
6481 ULONGEST rt_val, rt_val2 = 0, rn_val;
6482
6483 rt_val = displaced_read_reg (regs, dsc, 0);
6484 if (dsc->u.ldst.xfersize == 8)
6485 rt_val2 = displaced_read_reg (regs, dsc, 1);
6486 rn_val = displaced_read_reg (regs, dsc, 2);
6487
6488 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6489 if (dsc->u.ldst.xfersize > 4)
6490 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6491 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6492 if (!dsc->u.ldst.immed)
6493 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6494
6495 /* Handle register writeback. */
6496 if (dsc->u.ldst.writeback)
6497 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6498 /* Put result in right place. */
6499 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6500 if (dsc->u.ldst.xfersize == 8)
6501 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6502}
6503
6504/* Clean up store instructions. */
6505
6506static void
6507cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6508 arm_displaced_step_copy_insn_closure *dsc)
6509{
6510 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6511
6512 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6513 if (dsc->u.ldst.xfersize > 4)
6514 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6515 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6516 if (!dsc->u.ldst.immed)
6517 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6518 if (!dsc->u.ldst.restore_r4)
6519 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6520
6521 /* Writeback. */
6522 if (dsc->u.ldst.writeback)
6523 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6524}
6525
6526/* Copy "extra" load/store instructions. These are halfword/doubleword
6527 transfers, which have a different encoding to byte/word transfers. */
6528
6529static int
6530arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6531 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6532{
6533 unsigned int op1 = bits (insn, 20, 24);
6534 unsigned int op2 = bits (insn, 5, 6);
6535 unsigned int rt = bits (insn, 12, 15);
6536 unsigned int rn = bits (insn, 16, 19);
6537 unsigned int rm = bits (insn, 0, 3);
6538 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6539 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
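  /* These tables are indexed by the OPCODE value computed below from op1/op2:
     LOAD[] says whether the operation is a load, and BYTESIZE[] gives the
     transfer width in bytes (8 denoting the doubleword LDRD/STRD forms, which
     also transfer Rt+1).  */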
6540 int immed = (op1 & 0x4) != 0;
6541 int opcode;
6542 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6543
6544 if (!insn_references_pc (insn, 0x000ff00ful))
6545 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6546
6547 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
6548 unprivileged ? "unprivileged " : "",
6549 (unsigned long) insn);
6550
6551 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6552
6553 if (opcode < 0)
6554 internal_error (_("copy_extra_ld_st: instruction decode error"));
6555
6556 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6557 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6558 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6559 if (!immed)
6560 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6561
6562 rt_val = displaced_read_reg (regs, dsc, rt);
6563 if (bytesize[opcode] == 8)
6564 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6565 rn_val = displaced_read_reg (regs, dsc, rn);
6566 if (!immed)
6567 rm_val = displaced_read_reg (regs, dsc, rm);
6568
6569 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6570 if (bytesize[opcode] == 8)
6571 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6572 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6573 if (!immed)
6574 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6575
6576 dsc->rd = rt;
6577 dsc->u.ldst.xfersize = bytesize[opcode];
6578 dsc->u.ldst.rn = rn;
6579 dsc->u.ldst.immed = immed;
6580 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6581 dsc->u.ldst.restore_r4 = 0;
6582
6583 if (immed)
6584 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6585 ->
6586 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6587 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6588 else
6589 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6590 ->
6591 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6592 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6593
6594 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6595
6596 return 0;
6597}
6598
6599/* Copy byte/half word/word loads and stores. */
6600
6601static void
6602install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6603 arm_displaced_step_copy_insn_closure *dsc, int load,
6604 int immed, int writeback, int size, int usermode,
6605 int rt, int rm, int rn)
6606{
6607 ULONGEST rt_val, rn_val, rm_val = 0;
6608
6609 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6610 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6611 if (!immed)
6612 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6613 if (!load)
6614 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6615
6616 rt_val = displaced_read_reg (regs, dsc, rt);
6617 rn_val = displaced_read_reg (regs, dsc, rn);
6618 if (!immed)
6619 rm_val = displaced_read_reg (regs, dsc, rm);
6620
6621 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6622 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6623 if (!immed)
6624 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6625 dsc->rd = rt;
6626 dsc->u.ldst.xfersize = size;
6627 dsc->u.ldst.rn = rn;
6628 dsc->u.ldst.immed = immed;
6629 dsc->u.ldst.writeback = writeback;
6630
6631 /* To write PC we can do:
6632
6633 Before this sequence of instructions:
6634 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6635 r2 is the Rn value got from displaced_read_reg.
6636
6637 Insn1: push {pc} Write address of STR instruction + offset on stack
6638 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6639 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6640 = addr(Insn1) + offset - addr(Insn3) - 8
6641 = offset - 16
6642 Insn4: add r4, r4, #8 r4 = offset - 8
6643 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6644 = from + offset
6645 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6646
6647 Otherwise we don't know what value to write for PC, since the offset is
6648 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6649 of this can be found in Section "Saving from r15" in
6650 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6651
6652 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6653}
6654
6655
6656static int
6657thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6658 uint16_t insn2, struct regcache *regs,
6659 arm_displaced_step_copy_insn_closure *dsc, int size)
6660{
6661 unsigned int u_bit = bit (insn1, 7);
6662 unsigned int rt = bits (insn2, 12, 15);
6663 int imm12 = bits (insn2, 0, 11);
6664 ULONGEST pc_val;
6665
6666 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
6667 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6668 imm12);
6669
6670 if (!u_bit)
6671 imm12 = -1 * imm12;
6672
6673 /* Rewrite instruction LDR Rt imm12 into:
6674
6675 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6676
6677 LDR R0, R2, R3,
6678
6679 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6680
6681
6682 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6683 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6684 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6685
6686 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6687
6688 pc_val = pc_val & 0xfffffffc;
6689
6690 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6691 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6692
6693 dsc->rd = rt;
6694
6695 dsc->u.ldst.xfersize = size;
6696 dsc->u.ldst.immed = 0;
6697 dsc->u.ldst.writeback = 0;
6698 dsc->u.ldst.restore_r4 = 0;
6699
6700 /* LDR R0, R2, R3 */
6701 dsc->modinsn[0] = 0xf852;
6702 dsc->modinsn[1] = 0x3;
6703 dsc->numinsns = 2;
6704
6705 dsc->cleanup = &cleanup_load;
6706
6707 return 0;
6708}
6709
6710static int
6711thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6712 uint16_t insn2, struct regcache *regs,
6713 arm_displaced_step_copy_insn_closure *dsc,
6714 int writeback, int immed)
6715{
6716 unsigned int rt = bits (insn2, 12, 15);
6717 unsigned int rn = bits (insn1, 0, 3);
6718 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6719 /* In LDR (register), there is also a register Rm, which is not allowed to
6720 be PC, so we don't have to check it. */
6721
6722 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6723 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6724 dsc);
6725
6726 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
6727 rt, rn, insn1, insn2);
6728
6729 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6730 0, rt, rm, rn);
6731
6732 dsc->u.ldst.restore_r4 = 0;
6733
6734 if (immed)
6735 /* ldr[b]<cond> rt, [rn, #imm], etc.
6736 ->
6737 ldr[b]<cond> r0, [r2, #imm]. */
6738 {
6739 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6740 dsc->modinsn[1] = insn2 & 0x0fff;
6741 }
6742 else
6743 /* ldr[b]<cond> rt, [rn, rm], etc.
6744 ->
6745 ldr[b]<cond> r0, [r2, r3]. */
6746 {
6747 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6748 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6749 }
6750
6751 dsc->numinsns = 2;
6752
6753 return 0;
6754}
6755
6756
6757static int
6758arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6759 struct regcache *regs,
6760 arm_displaced_step_copy_insn_closure *dsc,
6761 int load, int size, int usermode)
6762{
6763 int immed = !bit (insn, 25);
6764 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6765 unsigned int rt = bits (insn, 12, 15);
6766 unsigned int rn = bits (insn, 16, 19);
6767 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6768
6769 if (!insn_references_pc (insn, 0x000ff00ful))
6770 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6771
6772 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
6773 load ? (size == 1 ? "ldrb" : "ldr")
6774 : (size == 1 ? "strb" : "str"),
6775 usermode ? "t" : "",
6776 rt, rn,
6777 (unsigned long) insn);
6778
6779 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6780 usermode, rt, rm, rn);
6781
6782 if (load || rt != ARM_PC_REGNUM)
6783 {
6784 dsc->u.ldst.restore_r4 = 0;
6785
6786 if (immed)
6787 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6788 ->
6789 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6790 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6791 else
6792 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6793 ->
6794 {ldr,str}[b]<cond> r0, [r2, r3]. */
6795 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6796 }
6797 else
6798 {
6799 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6800 dsc->u.ldst.restore_r4 = 1;
6801 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6802 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6803 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6804 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6805 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6806
6807 /* As above. */
6808 if (immed)
6809 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6810 else
6811 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6812
6813 dsc->numinsns = 6;
6814 }
6815
6816 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6817
6818 return 0;
6819}
6820
6821/* Cleanup LDM instructions with fully-populated register list. This is an
6822 unfortunate corner case: it's impossible to implement correctly by modifying
6823 the instruction. The issue is as follows: we have an instruction,
6824
6825 ldm rN, {r0-r15}
6826
6827 which we must rewrite to avoid loading PC. A possible solution would be to
6828 do the load in two halves, something like (with suitable cleanup
6829 afterwards):
6830
6831 mov r8, rN
6832 ldm[id][ab] r8!, {r0-r7}
6833 str r7, <temp>
6834 ldm[id][ab] r8, {r7-r14}
6835 <bkpt>
6836
6837 but at present there's no suitable place for <temp>, since the scratch space
6838 is overwritten before the cleanup routine is called. For now, we simply
6839 emulate the instruction. */
6840
6841static void
6842cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6843 arm_displaced_step_copy_insn_closure *dsc)
6844{
6845 int inc = dsc->u.block.increment;
6846 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6847 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6848 uint32_t regmask = dsc->u.block.regmask;
6849 int regno = inc ? 0 : 15;
6850 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6851 int exception_return = dsc->u.block.load && dsc->u.block.user
6852 && (regmask & 0x8000) != 0;
6853 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6854 int do_transfer = condition_true (dsc->u.block.cond, status);
6855 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6856
6857 if (!do_transfer)
6858 return;
6859
6860 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6861 sensible we can do here. Complain loudly. */
6862 if (exception_return)
6863 error (_("Cannot single-step exception return"));
6864
6865 /* We don't handle any stores here for now. */
6866 gdb_assert (dsc->u.block.load != 0);
6867
6868 displaced_debug_printf ("emulating block transfer: %s %s %s",
6869 dsc->u.block.load ? "ldm" : "stm",
6870 dsc->u.block.increment ? "inc" : "dec",
6871 dsc->u.block.before ? "before" : "after");
6872
6873 while (regmask)
6874 {
6875 uint32_t memword;
6876
6877 if (inc)
6878 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6879 regno++;
6880 else
6881 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6882 regno--;
6883
6884 xfer_addr += bump_before;
6885
6886 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6887 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6888
6889 xfer_addr += bump_after;
6890
6891 regmask &= ~(1 << regno);
6892 }
6893
6894 if (dsc->u.block.writeback)
6895 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6896 CANNOT_WRITE_PC);
6897}
6898
6899/* Clean up an STM which included the PC in the register list. */
6900
6901static void
6902cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6903 arm_displaced_step_copy_insn_closure *dsc)
6904{
6905 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6906 int store_executed = condition_true (dsc->u.block.cond, status);
6907 CORE_ADDR pc_stored_at, transferred_regs
6908 = count_one_bits (dsc->u.block.regmask);
6909 CORE_ADDR stm_insn_addr;
6910 uint32_t pc_val;
6911 long offset;
6912 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6913
6914 /* If condition code fails, there's nothing else to do. */
6915 if (!store_executed)
6916 return;
6917
6918 if (dsc->u.block.increment)
6919 {
6920 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6921
6922 if (dsc->u.block.before)
6923 pc_stored_at += 4;
6924 }
6925 else
6926 {
6927 pc_stored_at = dsc->u.block.xfer_addr;
6928
6929 if (dsc->u.block.before)
6930 pc_stored_at -= 4;
6931 }
6932
6933 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6934 stm_insn_addr = dsc->scratch_base;
6935 offset = pc_val - stm_insn_addr;
6936
6937 displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
6938 offset);
6939
6940 /* Rewrite the stored PC to the proper value for the non-displaced original
6941 instruction. */
6942 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6943 dsc->insn_addr + offset);
6944}
6945
6946/* Clean up an LDM which includes the PC in the register list. We clumped all
6947 the registers in the transferred list into a contiguous range r0...rX (to
6948 avoid loading PC directly and losing control of the debugged program), so we
6949 must undo that here. */
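
/* For example, "ldmia r0, {r3, r7, pc}" is rewritten by arm_copy_block_xfer
   into "ldmia r0, {r0-r2}".  This cleanup then moves r2 into the PC (via
   LOAD_WRITE_PC), r1 into r7 and r0 into r3, and finally restores the
   original values of r0-r2, which were saved in dsc->tmp[] before the
   transfer.  */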
6950
6951static void
6952cleanup_block_load_pc (struct gdbarch *gdbarch,
6953 struct regcache *regs,
6954 arm_displaced_step_copy_insn_closure *dsc)
6955{
6956 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6957 int load_executed = condition_true (dsc->u.block.cond, status);
6958 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6959 unsigned int regs_loaded = count_one_bits (mask);
6960 unsigned int num_to_shuffle = regs_loaded, clobbered;
6961
6962 /* The method employed here will fail if the register list is fully populated
6963 (we need to avoid loading PC directly). */
6964 gdb_assert (num_to_shuffle < 16);
6965
6966 if (!load_executed)
6967 return;
6968
6969 clobbered = (1 << num_to_shuffle) - 1;
6970
6971 while (num_to_shuffle > 0)
6972 {
6973 if ((mask & (1 << write_reg)) != 0)
6974 {
6975 unsigned int read_reg = num_to_shuffle - 1;
6976
6977 if (read_reg != write_reg)
6978 {
6979 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
6980 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6981 displaced_debug_printf ("LDM: move loaded register r%d to r%d",
6982 read_reg, write_reg);
6983 }
6984 else
6985 displaced_debug_printf ("LDM: register r%d already in the right "
6986 "place", write_reg);
6987
6988 clobbered &= ~(1 << write_reg);
6989
6990 num_to_shuffle--;
6991 }
6992
6993 write_reg--;
6994 }
6995
6996 /* Restore any registers we scribbled over. */
6997 for (write_reg = 0; clobbered != 0; write_reg++)
6998 {
6999 if ((clobbered & (1 << write_reg)) != 0)
7000 {
7001 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7002 CANNOT_WRITE_PC);
7003 displaced_debug_printf ("LDM: restored clobbered register r%d",
7004 write_reg);
7005 clobbered &= ~(1 << write_reg);
7006 }
7007 }
7008
7009 /* Perform register writeback manually. */
7010 if (dsc->u.block.writeback)
7011 {
7012 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7013
7014 if (dsc->u.block.increment)
7015 new_rn_val += regs_loaded * 4;
7016 else
7017 new_rn_val -= regs_loaded * 4;
7018
7019 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7020 CANNOT_WRITE_PC);
7021 }
7022}
7023
7024/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7025 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7026
7027static int
7028arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7029 struct regcache *regs,
7030 arm_displaced_step_copy_insn_closure *dsc)
7031{
7032 int load = bit (insn, 20);
7033 int user = bit (insn, 22);
7034 int increment = bit (insn, 23);
7035 int before = bit (insn, 24);
7036 int writeback = bit (insn, 21);
7037 int rn = bits (insn, 16, 19);
7038
7039 /* Block transfers which don't mention PC can be run directly
7040 out-of-line. */
7041 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7042 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7043
7044 if (rn == ARM_PC_REGNUM)
7045 {
7046 warning (_("displaced: Unpredictable LDM or STM with "
7047 "base register r15"));
7048 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7049 }
7050
7051 displaced_debug_printf ("copying block transfer insn %.8lx",
7052 (unsigned long) insn);
7053
7054 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7055 dsc->u.block.rn = rn;
7056
7057 dsc->u.block.load = load;
7058 dsc->u.block.user = user;
7059 dsc->u.block.increment = increment;
7060 dsc->u.block.before = before;
7061 dsc->u.block.writeback = writeback;
7062 dsc->u.block.cond = bits (insn, 28, 31);
7063
7064 dsc->u.block.regmask = insn & 0xffff;
7065
7066 if (load)
7067 {
7068 if ((insn & 0xffff) == 0xffff)
7069 {
7070 /* LDM with a fully-populated register list. This case is
7071 particularly tricky. Implement for now by fully emulating the
7072 instruction (which might not behave perfectly in all cases, but
7073 these instructions should be rare enough for that not to matter
7074 too much). */
7075 dsc->modinsn[0] = ARM_NOP;
7076
7077 dsc->cleanup = &cleanup_block_load_all;
7078 }
7079 else
7080 {
7081 /* LDM of a list of registers which includes PC. Implement by
7082 rewriting the list of registers to be transferred into a
7083 contiguous chunk r0...rX before doing the transfer, then shuffling
7084 registers into the correct places in the cleanup routine. */
7085 unsigned int regmask = insn & 0xffff;
7086 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
7087 unsigned int i;
7088
7089 for (i = 0; i < num_in_list; i++)
7090 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7091
7092 /* Writeback makes things complicated. We need to avoid clobbering
7093 the base register with one of the registers in our modified
7094 register list, but just using a different register can't work in
7095 all cases, e.g.:
7096
7097 ldm r14!, {r0-r13,pc}
7098
7099 which would need to be rewritten as:
7100
7101 ldm rN!, {r0-r14}
7102
7103 but that can't work, because there's no free register for N.
7104
7105 Solve this by turning off the writeback bit, and emulating
7106 writeback manually in the cleanup routine. */
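
	  /* For example, "ldmia r6!, {r1, r5, pc}" (register mask 0x8022,
	     three registers) becomes "ldmia r6, {r0-r2}" (mask 0x0007) with
	     the writeback bit cleared; cleanup_block_load_pc then moves the
	     loaded values into r1, r5 and the PC, and applies the base
	     register update by hand.  */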
7107
7108 if (writeback)
7109 insn &= ~(1 << 21);
7110
7111 new_regmask = (1 << num_in_list) - 1;
7112
7113 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
7114 "%.4x, modified list %.4x",
7115 rn, writeback ? "!" : "",
7116 (int) insn & 0xffff, new_regmask);
7117
7118 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7119
7120 dsc->cleanup = &cleanup_block_load_pc;
7121 }
7122 }
7123 else
7124 {
7125 /* STM of a list of registers which includes PC. Run the instruction
7126 as-is, but out of line: this will store the wrong value for the PC,
7127 so we must manually fix up the memory in the cleanup routine.
7128 Doing things this way has the advantage that we can auto-detect
7129 the offset of the PC write (which is architecture-dependent) in
7130 the cleanup routine. */
7131 dsc->modinsn[0] = insn;
7132
7133 dsc->cleanup = &cleanup_block_store_pc;
7134 }
7135
7136 return 0;
7137}
7138
7139static int
7140thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7141 struct regcache *regs,
7142 arm_displaced_step_copy_insn_closure *dsc)
7143{
7144 int rn = bits (insn1, 0, 3);
7145 int load = bit (insn1, 4);
7146 int writeback = bit (insn1, 5);
7147
7148 /* Block transfers which don't mention PC can be run directly
7149 out-of-line. */
7150 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7151 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7152
7153 if (rn == ARM_PC_REGNUM)
7154 {
7155 warning (_("displaced: Unpredictable LDM or STM with "
7156 "base register r15"));
7157 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7158 "unpredictable ldm/stm", dsc);
7159 }
7160
7161 displaced_debug_printf ("copying block transfer insn %.4x%.4x",
7162 insn1, insn2);
7163
7164  /* Clear bit 13, since it should always be zero.  */
7165 dsc->u.block.regmask = (insn2 & 0xdfff);
7166 dsc->u.block.rn = rn;
7167
7168 dsc->u.block.load = load;
7169 dsc->u.block.user = 0;
7170 dsc->u.block.increment = bit (insn1, 7);
7171 dsc->u.block.before = bit (insn1, 8);
7172 dsc->u.block.writeback = writeback;
7173 dsc->u.block.cond = INST_AL;
7174 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7175
7176 if (load)
7177 {
7178 if (dsc->u.block.regmask == 0xffff)
7179 {
7180	  /* This cannot happen: bit 13 was cleared above, so the register mask can never be 0xffff.  */
7181 gdb_assert (0);
7182 }
7183 else
7184 {
7185 unsigned int regmask = dsc->u.block.regmask;
7186 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
7187 unsigned int i;
7188
7189 for (i = 0; i < num_in_list; i++)
7190 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7191
7192 if (writeback)
7193 insn1 &= ~(1 << 5);
7194
7195 new_regmask = (1 << num_in_list) - 1;
7196
7197 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
7198 "%.4x, modified list %.4x",
7199 rn, writeback ? "!" : "",
7200 (int) dsc->u.block.regmask, new_regmask);
7201
7202 dsc->modinsn[0] = insn1;
7203 dsc->modinsn[1] = (new_regmask & 0xffff);
7204 dsc->numinsns = 2;
7205
7206 dsc->cleanup = &cleanup_block_load_pc;
7207 }
7208 }
7209 else
7210 {
7211 dsc->modinsn[0] = insn1;
7212 dsc->modinsn[1] = insn2;
7213 dsc->numinsns = 2;
7214 dsc->cleanup = &cleanup_block_store_pc;
7215 }
7216 return 0;
7217}
7218
7219/* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
7220 This is used to avoid a dependency on BFD's bfd_endian enum. */
7221
7222ULONGEST
7223arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
7224 int byte_order)
7225{
7226 return read_memory_unsigned_integer (memaddr, len,
7227 (enum bfd_endian) byte_order);
7228}
7229
7230/* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
7231
7232CORE_ADDR
7233arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
7234 CORE_ADDR val)
7235{
7236 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
7237}
7238
7239/* Wrapper over syscall_next_pc for use in get_next_pcs. */
7240
7241static CORE_ADDR
7242arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
7243{
7244 return 0;
7245}
7246
7247/* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
7248
7249int
7250arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
7251{
7252 return arm_is_thumb (self->regcache);
7253}
7254
7255/* single_step() is called just before we want to resume the inferior,
7256   if we want to single-step it but there is no hardware or kernel
7257   single-step support.  We find the targets of the upcoming instructions
7258   and set breakpoints on them.  */
7259
7260std::vector<CORE_ADDR>
7261arm_software_single_step (struct regcache *regcache)
7262{
7263 struct gdbarch *gdbarch = regcache->arch ();
7264 struct arm_get_next_pcs next_pcs_ctx;
7265
7266 arm_get_next_pcs_ctor (&next_pcs_ctx,
7267 &arm_get_next_pcs_ops,
7268 gdbarch_byte_order (gdbarch),
7269 gdbarch_byte_order_for_code (gdbarch),
7270 0,
7271 regcache);
7272
7273 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7274
7275 for (CORE_ADDR &pc_ref : next_pcs)
7276 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
7277
7278 return next_pcs;
7279}
7280
7281/* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7282 for Linux, where some SVC instructions must be treated specially. */
7283
7284static void
7285cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7286 arm_displaced_step_copy_insn_closure *dsc)
7287{
7288 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7289
7290 displaced_debug_printf ("cleanup for svc, resume at %.8lx",
7291 (unsigned long) resume_addr);
7292
7293 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7294}
7295
7296
7297/* Common copy routine for svc instruction. */
7298
7299static int
7300install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7301 arm_displaced_step_copy_insn_closure *dsc)
7302{
7303 /* Preparation: none.
7304 Insn: unmodified svc.
7305 Cleanup: pc <- insn_addr + insn_size. */
7306
7307 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7308 instruction. */
7309 dsc->wrote_to_pc = 1;
7310
7311 /* Allow OS-specific code to override SVC handling. */
7312 if (dsc->u.svc.copy_svc_os)
7313 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7314 else
7315 {
7316 dsc->cleanup = &cleanup_svc;
7317 return 0;
7318 }
7319}
7320
7321static int
7322arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7323 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7324{
7325
7326 displaced_debug_printf ("copying svc insn %.8lx",
7327 (unsigned long) insn);
7328
7329 dsc->modinsn[0] = insn;
7330
7331 return install_svc (gdbarch, regs, dsc);
7332}
7333
7334static int
7335thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7336 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7337{
7338
7339 displaced_debug_printf ("copying svc insn %.4x", insn);
7340
7341 dsc->modinsn[0] = insn;
7342
7343 return install_svc (gdbarch, regs, dsc);
7344}
7345
7346/* Copy undefined instructions. */
7347
7348static int
7349arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7350 arm_displaced_step_copy_insn_closure *dsc)
7351{
7352 displaced_debug_printf ("copying undefined insn %.8lx",
7353 (unsigned long) insn);
7354
7355 dsc->modinsn[0] = insn;
7356
7357 return 0;
7358}
7359
7360static int
7361thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7362 arm_displaced_step_copy_insn_closure *dsc)
7363{
7364
7365 displaced_debug_printf ("copying undefined insn %.4x %.4x",
7366 (unsigned short) insn1, (unsigned short) insn2);
7367
7368 dsc->modinsn[0] = insn1;
7369 dsc->modinsn[1] = insn2;
7370 dsc->numinsns = 2;
7371
7372 return 0;
7373}
7374
7375/* Copy unpredictable instructions. */
7376
7377static int
7378arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7379 arm_displaced_step_copy_insn_closure *dsc)
7380{
7381 displaced_debug_printf ("copying unpredictable insn %.8lx",
7382 (unsigned long) insn);
7383
7384 dsc->modinsn[0] = insn;
7385
7386 return 0;
7387}
7388
7389/* The decode_* functions are instruction decoding helpers. They mostly follow
7390 the presentation in the ARM ARM. */
7391
7392static int
7393arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7394 struct regcache *regs,
7395 arm_displaced_step_copy_insn_closure *dsc)
7396{
7397 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7398 unsigned int rn = bits (insn, 16, 19);
7399
7400 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
7401 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7402 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
7403 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7404 else if ((op1 & 0x60) == 0x20)
7405 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7406 else if ((op1 & 0x71) == 0x40)
7407 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7408 dsc);
7409 else if ((op1 & 0x77) == 0x41)
7410 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7411 else if ((op1 & 0x77) == 0x45)
7412 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7413 else if ((op1 & 0x77) == 0x51)
7414 {
7415 if (rn != 0xf)
7416 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7417 else
7418 return arm_copy_unpred (gdbarch, insn, dsc);
7419 }
7420 else if ((op1 & 0x77) == 0x55)
7421 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7422 else if (op1 == 0x57)
7423 switch (op2)
7424 {
7425 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7426 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7427 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7428 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7429 default: return arm_copy_unpred (gdbarch, insn, dsc);
7430 }
7431 else if ((op1 & 0x63) == 0x43)
7432 return arm_copy_unpred (gdbarch, insn, dsc);
7433 else if ((op2 & 0x1) == 0x0)
7434 switch (op1 & ~0x80)
7435 {
7436 case 0x61:
7437 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7438 case 0x65:
7439 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7440 case 0x71: case 0x75:
7441 /* pld/pldw reg. */
7442 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7443 case 0x63: case 0x67: case 0x73: case 0x77:
7444 return arm_copy_unpred (gdbarch, insn, dsc);
7445 default:
7446 return arm_copy_undef (gdbarch, insn, dsc);
7447 }
7448 else
7449 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7450}
7451
7452static int
7453arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7454 struct regcache *regs,
7455 arm_displaced_step_copy_insn_closure *dsc)
7456{
7457 if (bit (insn, 27) == 0)
7458 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7459 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
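  /* For example (illustrative only): for insn = 0xfa000000, a BLX (immediate),
     bits {26,25,24,20} are {0,1,0,0}, so the switch value is 0x4 and the
     instruction is handled by arm_copy_b_bl_blx below.  */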
7460 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7461 {
7462 case 0x0: case 0x2:
7463 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7464
7465 case 0x1: case 0x3:
7466 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7467
7468 case 0x4: case 0x5: case 0x6: case 0x7:
7469 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7470
7471 case 0x8:
7472 switch ((insn & 0xe00000) >> 21)
7473 {
7474 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7475 /* stc/stc2. */
7476 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7477
7478 case 0x2:
7479 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7480
7481 default:
7482 return arm_copy_undef (gdbarch, insn, dsc);
7483 }
7484
7485 case 0x9:
7486 {
7487 int rn_f = (bits (insn, 16, 19) == 0xf);
7488 switch ((insn & 0xe00000) >> 21)
7489 {
7490 case 0x1: case 0x3:
7491 /* ldc/ldc2 imm (undefined for rn == pc). */
7492 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7493 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7494
7495 case 0x2:
7496 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7497
7498 case 0x4: case 0x5: case 0x6: case 0x7:
7499 /* ldc/ldc2 lit (undefined for rn != pc). */
7500 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7501 : arm_copy_undef (gdbarch, insn, dsc);
7502
7503 default:
7504 return arm_copy_undef (gdbarch, insn, dsc);
7505 }
7506 }
7507
7508 case 0xa:
7509 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7510
7511 case 0xb:
7512 if (bits (insn, 16, 19) == 0xf)
7513 /* ldc/ldc2 lit. */
7514 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7515 else
7516 return arm_copy_undef (gdbarch, insn, dsc);
7517
7518 case 0xc:
7519 if (bit (insn, 4))
7520 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7521 else
7522 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7523
7524 case 0xd:
7525 if (bit (insn, 4))
7526 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7527 else
7528 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7529
7530 default:
7531 return arm_copy_undef (gdbarch, insn, dsc);
7532 }
7533}
7534
7535/* Decode miscellaneous instructions in dp/misc encoding space. */
7536
7537static int
7538arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7539 struct regcache *regs,
7540 arm_displaced_step_copy_insn_closure *dsc)
7541{
7542 unsigned int op2 = bits (insn, 4, 6);
7543 unsigned int op = bits (insn, 21, 22);
7544
7545 switch (op2)
7546 {
7547 case 0x0:
7548 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7549
7550 case 0x1:
7551 if (op == 0x1) /* bx. */
7552 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7553 else if (op == 0x3)
7554 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7555 else
7556 return arm_copy_undef (gdbarch, insn, dsc);
7557
7558 case 0x2:
7559 if (op == 0x1)
7560 /* Not really supported. */
7561 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7562 else
7563 return arm_copy_undef (gdbarch, insn, dsc);
7564
7565 case 0x3:
7566 if (op == 0x1)
7567 return arm_copy_bx_blx_reg (gdbarch, insn,
7568 regs, dsc); /* blx register. */
7569 else
7570 return arm_copy_undef (gdbarch, insn, dsc);
7571
7572 case 0x5:
7573 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7574
7575 case 0x7:
7576 if (op == 0x1)
7577 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7578 else if (op == 0x3)
7579 /* Not really supported. */
7580 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7581 /* Fall through. */
7582
7583 default:
7584 return arm_copy_undef (gdbarch, insn, dsc);
7585 }
7586}
7587
7588static int
7589arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7590 struct regcache *regs,
7591 arm_displaced_step_copy_insn_closure *dsc)
7592{
7593 if (bit (insn, 25))
7594 switch (bits (insn, 20, 24))
7595 {
7596 case 0x10:
7597 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7598
7599 case 0x14:
7600 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7601
7602 case 0x12: case 0x16:
7603 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7604
7605 default:
7606 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7607 }
7608 else
7609 {
7610 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7611
7612 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7613 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7614 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7615 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7616 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7617 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7618 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7619 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7620 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7621 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7622 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7623 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7624 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7625 /* 2nd arg means "unprivileged". */
7626 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7627 dsc);
7628 }
7629
7630 /* Should be unreachable. */
7631 return 1;
7632}
7633
7634static int
7635arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7636 struct regcache *regs,
7637 arm_displaced_step_copy_insn_closure *dsc)
7638{
7639 int a = bit (insn, 25), b = bit (insn, 4);
7640 uint32_t op1 = bits (insn, 20, 24);
7641
7642 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7643 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7644 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7645 else if ((!a && (op1 & 0x17) == 0x02)
7646 || (a && (op1 & 0x17) == 0x02 && !b))
7647 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7648 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7649 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7650 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7651 else if ((!a && (op1 & 0x17) == 0x03)
7652 || (a && (op1 & 0x17) == 0x03 && !b))
7653 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7654 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7655 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7656 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7657 else if ((!a && (op1 & 0x17) == 0x06)
7658 || (a && (op1 & 0x17) == 0x06 && !b))
7659 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7660 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7661 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7662 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7663 else if ((!a && (op1 & 0x17) == 0x07)
7664 || (a && (op1 & 0x17) == 0x07 && !b))
7665 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7666
7667 /* Should be unreachable. */
7668 return 1;
7669}
7670
7671static int
7672arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7673 arm_displaced_step_copy_insn_closure *dsc)
7674{
7675 switch (bits (insn, 20, 24))
7676 {
7677 case 0x00: case 0x01: case 0x02: case 0x03:
7678 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7679
7680 case 0x04: case 0x05: case 0x06: case 0x07:
7681 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7682
7683 case 0x08: case 0x09: case 0x0a: case 0x0b:
7684 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7685 return arm_copy_unmodified (gdbarch, insn,
7686 "decode/pack/unpack/saturate/reverse", dsc);
7687
7688 case 0x18:
7689 if (bits (insn, 5, 7) == 0) /* op2. */
7690 {
7691 if (bits (insn, 12, 15) == 0xf)
7692 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7693 else
7694 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7695 }
7696 else
7697 return arm_copy_undef (gdbarch, insn, dsc);
7698
7699 case 0x1a: case 0x1b:
7700 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7701 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7702 else
7703 return arm_copy_undef (gdbarch, insn, dsc);
7704
7705 case 0x1c: case 0x1d:
7706 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7707 {
7708 if (bits (insn, 0, 3) == 0xf)
7709 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7710 else
7711 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7712 }
7713 else
7714 return arm_copy_undef (gdbarch, insn, dsc);
7715
7716 case 0x1e: case 0x1f:
7717 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7718 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7719 else
7720 return arm_copy_undef (gdbarch, insn, dsc);
7721 }
7722
7723 /* Should be unreachable. */
7724 return 1;
7725}
7726
7727static int
7728arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7729 struct regcache *regs,
7730 arm_displaced_step_copy_insn_closure *dsc)
7731{
7732 if (bit (insn, 25))
7733 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7734 else
7735 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7736}
7737
7738static int
7739arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7740 struct regcache *regs,
7741 arm_displaced_step_copy_insn_closure *dsc)
7742{
7743 unsigned int opcode = bits (insn, 20, 24);
7744
7745 switch (opcode)
7746 {
7747 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7748 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7749
7750 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7751 case 0x12: case 0x16:
7752 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7753
7754 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7755 case 0x13: case 0x17:
7756 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7757
7758 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7759 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7760 /* Note: no writeback for these instructions. Bit 25 will always be
7761 zero though (via caller), so the following works OK. */
7762 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7763 }
7764
7765 /* Should be unreachable. */
7766 return 1;
7767}
7768
7769/* Decode shifted register instructions. */
7770
7771static int
7772thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7773 uint16_t insn2, struct regcache *regs,
7774 arm_displaced_step_copy_insn_closure *dsc)
7775{
7776  /* The PC is only allowed to be used in the MOV instruction.  */
7777
7778 unsigned int op = bits (insn1, 5, 8);
7779 unsigned int rn = bits (insn1, 0, 3);
7780
7781 if (op == 0x2 && rn == 0xf) /* MOV */
7782 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7783 else
7784 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7785 "dp (shift reg)", dsc);
7786}
7787
7788
7789/* Decode extension register load/store. Exactly the same as
7790 arm_decode_ext_reg_ld_st. */
7791
7792static int
7793thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7794 uint16_t insn2, struct regcache *regs,
7795 arm_displaced_step_copy_insn_closure *dsc)
7796{
7797 unsigned int opcode = bits (insn1, 4, 8);
7798
7799 switch (opcode)
7800 {
7801 case 0x04: case 0x05:
7802 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7803 "vfp/neon vmov", dsc);
7804
7805 case 0x08: case 0x0c: /* 01x00 */
7806 case 0x0a: case 0x0e: /* 01x10 */
7807 case 0x12: case 0x16: /* 10x10 */
7808 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7809 "vfp/neon vstm/vpush", dsc);
7810
7811 case 0x09: case 0x0d: /* 01x01 */
7812 case 0x0b: case 0x0f: /* 01x11 */
7813 case 0x13: case 0x17: /* 10x11 */
7814 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7815 "vfp/neon vldm/vpop", dsc);
7816
7817 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7818 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7819 "vstr", dsc);
7820 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7821 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7822 }
7823
7824 /* Should be unreachable. */
7825 return 1;
7826}
7827
7828static int
7829arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7830 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7831{
7832 unsigned int op1 = bits (insn, 20, 25);
7833 int op = bit (insn, 4);
7834 unsigned int coproc = bits (insn, 8, 11);
7835
7836 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7837 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7838 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7839 && (coproc & 0xe) != 0xa)
7840 /* stc/stc2. */
7841 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7842 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7843 && (coproc & 0xe) != 0xa)
7844 /* ldc/ldc2 imm/lit. */
7845 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7846 else if ((op1 & 0x3e) == 0x00)
7847 return arm_copy_undef (gdbarch, insn, dsc);
7848 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7849 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7850 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7851 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7852 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7853 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7854 else if ((op1 & 0x30) == 0x20 && !op)
7855 {
7856 if ((coproc & 0xe) == 0xa)
7857 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7858 else
7859 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7860 }
7861 else if ((op1 & 0x30) == 0x20 && op)
7862 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7863 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7864 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7865 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7866 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7867 else if ((op1 & 0x30) == 0x30)
7868 return arm_copy_svc (gdbarch, insn, regs, dsc);
7869 else
7870 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7871}
7872
7873static int
7874thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7875 uint16_t insn2, struct regcache *regs,
7876 arm_displaced_step_copy_insn_closure *dsc)
7877{
7878 unsigned int coproc = bits (insn2, 8, 11);
7879 unsigned int bit_5_8 = bits (insn1, 5, 8);
7880 unsigned int bit_9 = bit (insn1, 9);
7881 unsigned int bit_4 = bit (insn1, 4);
7882
7883 if (bit_9 == 0)
7884 {
7885 if (bit_5_8 == 2)
7886 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7887 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7888 dsc);
7889 else if (bit_5_8 == 0) /* UNDEFINED. */
7890 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7891 else
7892 {
7893	  /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
7894 if ((coproc & 0xe) == 0xa)
7895 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7896 dsc);
7897 else /* coproc is not 101x. */
7898 {
7899 if (bit_4 == 0) /* STC/STC2. */
7900 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7901 "stc/stc2", dsc);
7902 else /* LDC/LDC2 {literal, immediate}. */
7903 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7904 regs, dsc);
7905 }
7906 }
7907 }
7908 else
7909 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7910
7911 return 0;
7912}
7913
7914static void
7915install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7916 arm_displaced_step_copy_insn_closure *dsc, int rd)
7917{
7918 /* ADR Rd, #imm
7919
7920 Rewrite as:
7921
7922 Preparation: Rd <- PC
7923 Insn: ADD Rd, #imm
7924 Cleanup: Null.
7925 */
7926
7927 /* Rd <- PC */
7928 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7929 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7930}
7931
7932static int
7933thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7934 arm_displaced_step_copy_insn_closure *dsc,
7935 int rd, unsigned int imm)
7936{
7937
7938 /* Encoding T2: ADDS Rd, #imm */
7939 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7940
7941 install_pc_relative (gdbarch, regs, dsc, rd);
7942
7943 return 0;
7944}
7945
7946static int
7947thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7948 struct regcache *regs,
7949 arm_displaced_step_copy_insn_closure *dsc)
7950{
7951 unsigned int rd = bits (insn, 8, 10);
7952 unsigned int imm8 = bits (insn, 0, 7);
7953
7954 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
7955 rd, imm8, insn);
7956
7957 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7958}
7959
7960static int
7961thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7962 uint16_t insn2, struct regcache *regs,
7963 arm_displaced_step_copy_insn_closure *dsc)
7964{
7965 unsigned int rd = bits (insn2, 8, 11);
7966  /* Since the immediate has the same encoding in ADR, ADD and SUB, we simply
7967     extract the raw immediate encoding rather than computing the immediate
7968     value.  When generating the ADD or SUB instruction, we can simply OR the
7969     immediate into the encoding.  */
7970 unsigned int imm_3_8 = insn2 & 0x70ff;
7971 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7972
7973 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
7974 rd, imm_i, imm_3_8, insn1, insn2);
7975
7976  if (bit (insn1, 7)) /* ADR encoding T2 (SUB form).  */
7977 {
7978      /* Rewrite as SUB (immediate) encoding T3: SUB Rd, Rd, #imm.  */
7979 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7980 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7981 }
7982  else /* ADR encoding T3 (ADD form).  */
7983 {
7984      /* Rewrite as ADD (immediate) encoding T3: ADD Rd, Rd, #imm.  */
7985 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7986 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7987 }
7988 dsc->numinsns = 2;
7989
7990 install_pc_relative (gdbarch, regs, dsc, rd);
7991
7992 return 0;
7993}
7994
7995static int
7996thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7997 struct regcache *regs,
7998 arm_displaced_step_copy_insn_closure *dsc)
7999{
8000 unsigned int rt = bits (insn1, 8, 10);
8001 unsigned int pc;
8002 int imm8 = (bits (insn1, 0, 7) << 2);
8003
8004  /* LDR Rd, [PC, #imm8]
8005
8006     Rewrite as:
8007
8008 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8009
8010 Insn: LDR R0, [R2, R3];
8011 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
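  /* Worked example (illustrative only): for insn1 = 0x4d04, i.e.
     "ldr r5, [pc, #16]", rt = 5 and imm8 = 16.  The copy executed out of
     line is 0x58d0, "ldr r0, [r2, r3]", with r2 pre-loaded with the aligned
     PC of the original instruction and r3 with 16; cleanup_load then copies
     r0 into r5 and restores r0, r2 and r3 from tmp[].  */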
8012
8013 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt, imm8);
8014
8015 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8016 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8017 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8018 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8019 /* The assembler calculates the required value of the offset from the
8020 Align(PC,4) value of this instruction to the label. */
8021 pc = pc & 0xfffffffc;
8022
8023 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8024 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8025
8026 dsc->rd = rt;
8027 dsc->u.ldst.xfersize = 4;
8028 dsc->u.ldst.rn = 0;
8029 dsc->u.ldst.immed = 0;
8030 dsc->u.ldst.writeback = 0;
8031 dsc->u.ldst.restore_r4 = 0;
8032
8033 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8034
8035 dsc->cleanup = &cleanup_load;
8036
8037 return 0;
8038}
8039
8040/* Copy Thumb cbnz/cbz instruction. */
8041
8042static int
8043thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8044 struct regcache *regs,
8045 arm_displaced_step_copy_insn_closure *dsc)
8046{
8047 int non_zero = bit (insn1, 11);
8048 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8049 CORE_ADDR from = dsc->insn_addr;
8050 int rn = bits (insn1, 0, 2);
8051 int rn_val = displaced_read_reg (regs, dsc, rn);
8052
8053 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8054  /* CBNZ and CBZ do not affect the condition flags.  If the condition is
8055     true, set it to INST_AL so cleanup_branch knows the branch is taken;
8056     otherwise leave it alone and cleanup_branch will do nothing.  */
8057 if (dsc->u.branch.cond)
8058 {
8059 dsc->u.branch.cond = INST_AL;
8060 dsc->u.branch.dest = from + 4 + imm5;
8061 }
8062 else
8063 dsc->u.branch.dest = from + 2;
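  /* For example (illustrative only): insn1 = 0xb90a is "cbnz r2, <from+6>"
     (imm5 = 2).  If r2 is non-zero the cleanup branches to from + 4 + 2;
     otherwise it resumes at from + 2, the next instruction.  */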
8064
8065 dsc->u.branch.link = 0;
8066 dsc->u.branch.exchange = 0;
8067
8068 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
8069 non_zero ? "cbnz" : "cbz",
8070 rn, rn_val, insn1, dsc->u.branch.dest);
8071
8072 dsc->modinsn[0] = THUMB_NOP;
8073
8074 dsc->cleanup = &cleanup_branch;
8075 return 0;
8076}
8077
8078/* Copy Table Branch Byte/Halfword.  */
8079static int
8080thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8081 uint16_t insn2, struct regcache *regs,
8082 arm_displaced_step_copy_insn_closure *dsc)
8083{
8084 ULONGEST rn_val, rm_val;
8085 int is_tbh = bit (insn2, 4);
8086 CORE_ADDR halfwords = 0;
8087 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8088
8089 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8090 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8091
8092 if (is_tbh)
8093 {
8094 gdb_byte buf[2];
8095
8096 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8097 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8098 }
8099 else
8100 {
8101 gdb_byte buf[1];
8102
8103 target_read_memory (rn_val + rm_val, buf, 1);
8104 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8105 }
8106
8107  displaced_debug_printf ("%s base 0x%x offset 0x%x halfwords 0x%x",
8108 is_tbh ? "tbh" : "tbb",
8109 (unsigned int) rn_val, (unsigned int) rm_val,
8110 (unsigned int) halfwords);
8111
8112 dsc->u.branch.cond = INST_AL;
8113 dsc->u.branch.link = 0;
8114 dsc->u.branch.exchange = 0;
8115 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
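  /* For example (illustrative only): for "tbh [r0, r1, lsl #1]" with a table
     entry of 5, halfwords = 5 and the branch destination is
     insn_addr + 4 + 10.  */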
8116
8117 dsc->cleanup = &cleanup_branch;
8118
8119 return 0;
8120}
8121
8122static void
8123cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8124 arm_displaced_step_copy_insn_closure *dsc)
8125{
8126 /* PC <- r7 */
8127 int val = displaced_read_reg (regs, dsc, 7);
8128 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8129
8130 /* r7 <- r8 */
8131 val = displaced_read_reg (regs, dsc, 8);
8132 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8133
8134 /* r8 <- tmp[0] */
8135 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8136
8137}
8138
8139static int
8140thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
8141 struct regcache *regs,
8142 arm_displaced_step_copy_insn_closure *dsc)
8143{
8144 dsc->u.block.regmask = insn1 & 0x00ff;
8145
8146  /* Rewrite instruction: POP {rX, rY, ..., rZ, PC}
8147     to:
8148
8149 (1) register list is full, that is, r0-r7 are used.
8150 Prepare: tmp[0] <- r8
8151
8152 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8153 MOV r8, r7; Move value of r7 to r8;
8154 POP {r7}; Store PC value into r7.
8155
8156     Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
8157
8158     (2) register list is not full, supposing there are N registers in the
8159     register list (excluding PC), 0 <= N <= 7.
8160 Prepare: for each i, 0 - N, tmp[i] <- ri.
8161
8162 POP {r0, r1, ...., rN};
8163
8164 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8165 from tmp[] properly.
8166 */
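  /* Worked example for case (2) (illustrative only): POP {r0, r2, pc} is
     insn1 = 0xbd05, so regmask = 0x05 and num_in_list = 2.  The copy run out
     of line is 0xbc07, POP {r0, r1, r2}, with r0-r2 saved in tmp[] first;
     cleanup_block_load_pc then moves the three popped values into r0, r2 and
     the PC and restores the remaining scratch registers from tmp[].  */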
8167 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
8168 dsc->u.block.regmask, insn1);
8169
8170 if (dsc->u.block.regmask == 0xff)
8171 {
8172 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8173
8174 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8175 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8176 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8177
8178 dsc->numinsns = 3;
8179 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8180 }
8181 else
8182 {
8183 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
8184 unsigned int i;
8185 unsigned int new_regmask;
8186
8187 for (i = 0; i < num_in_list + 1; i++)
8188 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8189
8190 new_regmask = (1 << (num_in_list + 1)) - 1;
8191
8192 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
8193 "modified list %.4x",
8194 (int) dsc->u.block.regmask, new_regmask);
8195
8196 dsc->u.block.regmask |= 0x8000;
8197 dsc->u.block.writeback = 0;
8198 dsc->u.block.cond = INST_AL;
8199
8200 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8201
8202 dsc->cleanup = &cleanup_block_load_pc;
8203 }
8204
8205 return 0;
8206}
8207
8208static void
8209thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8210 struct regcache *regs,
8211 arm_displaced_step_copy_insn_closure *dsc)
8212{
8213 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8214 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8215 int err = 0;
8216
8217 /* 16-bit thumb instructions. */
8218 switch (op_bit_12_15)
8219 {
8220    /* Shift (immediate), add, subtract, move and compare.  */
8221 case 0: case 1: case 2: case 3:
8222 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8223 "shift/add/sub/mov/cmp",
8224 dsc);
8225 break;
8226 case 4:
8227 switch (op_bit_10_11)
8228 {
8229 case 0: /* Data-processing */
8230 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8231 "data-processing",
8232 dsc);
8233 break;
8234 case 1: /* Special data instructions and branch and exchange. */
8235 {
8236 unsigned short op = bits (insn1, 7, 9);
8237 if (op == 6 || op == 7) /* BX or BLX */
8238 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8239 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8240 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8241 else
8242 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8243 dsc);
8244 }
8245 break;
8246 default: /* LDR (literal) */
8247 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8248 }
8249 break;
8250 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8251 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8252 break;
8253 case 10:
8254 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8255 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8256 else /* Generate SP-relative address */
8257 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8258 break;
8259 case 11: /* Misc 16-bit instructions */
8260 {
8261 switch (bits (insn1, 8, 11))
8262 {
8263 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8264 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8265 break;
8266 case 12: case 13: /* POP */
8267 if (bit (insn1, 8)) /* PC is in register list. */
8268 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8269 else
8270 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8271 break;
8272 case 15: /* If-Then, and hints */
8273 if (bits (insn1, 0, 3))
8274	    /* If-Then makes up to four following instructions conditional.
8275	       The IT instruction itself is not conditional, so handle it as an
8276	       ordinary unmodified instruction.  */
8277 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8278 dsc);
8279 else
8280 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8281 break;
8282 default:
8283 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8284 }
8285 }
8286 break;
8287 case 12:
8288 if (op_bit_10_11 < 2) /* Store multiple registers */
8289 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8290 else /* Load multiple registers */
8291 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8292 break;
8293 case 13: /* Conditional branch and supervisor call */
8294 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8295 err = thumb_copy_b (gdbarch, insn1, dsc);
8296 else
8297 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8298 break;
8299 case 14: /* Unconditional branch */
8300 err = thumb_copy_b (gdbarch, insn1, dsc);
8301 break;
8302 default:
8303 err = 1;
8304 }
8305
8306 if (err)
8307 internal_error (_("thumb_process_displaced_16bit_insn: Instruction decode error"));
8308}
8309
8310static int
8311decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8312 uint16_t insn1, uint16_t insn2,
8313 struct regcache *regs,
8314 arm_displaced_step_copy_insn_closure *dsc)
8315{
8316 int rt = bits (insn2, 12, 15);
8317 int rn = bits (insn1, 0, 3);
8318 int op1 = bits (insn1, 7, 8);
8319
8320 switch (bits (insn1, 5, 6))
8321 {
8322 case 0: /* Load byte and memory hints */
8323 if (rt == 0xf) /* PLD/PLI */
8324 {
8325 if (rn == 0xf)
8326 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8327 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8328 else
8329 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8330 "pli/pld", dsc);
8331 }
8332 else
8333 {
8334 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8335 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8336 1);
8337 else
8338 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8339 "ldrb{reg, immediate}/ldrbt",
8340 dsc);
8341 }
8342
8343 break;
8344 case 1: /* Load halfword and memory hints. */
8345 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8346 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8347 "pld/unalloc memhint", dsc);
8348 else
8349 {
8350 if (rn == 0xf)
8351 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8352 2);
8353 else
8354 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8355 "ldrh/ldrht", dsc);
8356 }
8357 break;
8358 case 2: /* Load word */
8359 {
8360 int insn2_bit_8_11 = bits (insn2, 8, 11);
8361
8362 if (rn == 0xf)
8363 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8364 else if (op1 == 0x1) /* Encoding T3 */
8365 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8366 0, 1);
8367 else /* op1 == 0x0 */
8368 {
8369 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8370 /* LDR (immediate) */
8371 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8372 dsc, bit (insn2, 8), 1);
8373 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8374 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8375 "ldrt", dsc);
8376 else
8377 /* LDR (register) */
8378 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8379 dsc, 0, 0);
8380 }
8381 break;
8382 }
8383 default:
8384 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8385 break;
8386 }
8387 return 0;
8388}
8389
8390static void
8391thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8392 uint16_t insn2, struct regcache *regs,
8393 arm_displaced_step_copy_insn_closure *dsc)
8394{
8395 int err = 0;
8396 unsigned short op = bit (insn2, 15);
8397 unsigned int op1 = bits (insn1, 11, 12);
8398
8399 switch (op1)
8400 {
8401 case 1:
8402 {
8403 switch (bits (insn1, 9, 10))
8404 {
8405 case 0:
8406 if (bit (insn1, 6))
8407 {
8408 /* Load/store {dual, exclusive}, table branch. */
8409 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8410 && bits (insn2, 5, 7) == 0)
8411 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8412 dsc);
8413 else
8414		    /* PC is not allowed to be used in load/store {dual, exclusive}
8415		       instructions.  */
8416 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8417 "load/store dual/ex", dsc);
8418 }
8419 else /* load/store multiple */
8420 {
8421 switch (bits (insn1, 7, 8))
8422 {
8423 case 0: case 3: /* SRS, RFE */
8424 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8425 "srs/rfe", dsc);
8426 break;
8427 case 1: case 2: /* LDM/STM/PUSH/POP */
8428 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8429 break;
8430 }
8431 }
8432 break;
8433
8434 case 1:
8435 /* Data-processing (shift register). */
8436 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8437 dsc);
8438 break;
8439 default: /* Coprocessor instructions. */
8440 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8441 break;
8442 }
8443 break;
8444 }
8445 case 2: /* op1 = 2 */
8446 if (op) /* Branch and misc control. */
8447 {
8448 if (bit (insn2, 14) /* BLX/BL */
8449 || bit (insn2, 12) /* Unconditional branch */
8450 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8451 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8452 else
8453 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8454 "misc ctrl", dsc);
8455 }
8456 else
8457 {
8458 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8459 {
8460 int dp_op = bits (insn1, 4, 8);
8461 int rn = bits (insn1, 0, 3);
8462 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
8463 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8464 regs, dsc);
8465 else
8466 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8467 "dp/pb", dsc);
8468 }
8469 else /* Data processing (modified immediate) */
8470 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8471 "dp/mi", dsc);
8472 }
8473 break;
8474 case 3: /* op1 = 3 */
8475 switch (bits (insn1, 9, 10))
8476 {
8477 case 0:
8478 if (bit (insn1, 4))
8479 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8480 regs, dsc);
8481 else /* NEON Load/Store and Store single data item */
8482 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8483 "neon elt/struct load/store",
8484 dsc);
8485 break;
8486 case 1: /* op1 = 3, bits (9, 10) == 1 */
8487 switch (bits (insn1, 7, 8))
8488 {
8489 case 0: case 1: /* Data processing (register) */
8490 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8491 "dp(reg)", dsc);
8492 break;
8493 case 2: /* Multiply and absolute difference */
8494 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8495 "mul/mua/diff", dsc);
8496 break;
8497 case 3: /* Long multiply and divide */
8498 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8499 "lmul/lmua", dsc);
8500 break;
8501 }
8502 break;
8503 default: /* Coprocessor instructions */
8504 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8505 break;
8506 }
8507 break;
8508 default:
8509 err = 1;
8510 }
8511
8512 if (err)
8513 internal_error (_("thumb_process_displaced_32bit_insn: Instruction decode error"));
8514
8515}
8516
8517static void
8518thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8519 struct regcache *regs,
8520 arm_displaced_step_copy_insn_closure *dsc)
8521{
8522 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8523 uint16_t insn1
8524 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8525
8526 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
8527 insn1, (unsigned long) from);
8528
8529 dsc->is_thumb = 1;
8530 dsc->insn_size = thumb_insn_size (insn1);
8531 if (thumb_insn_size (insn1) == 4)
8532 {
8533 uint16_t insn2
8534 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8535 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8536 }
8537 else
8538 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8539}
8540
8541void
8542arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8543 CORE_ADDR to, struct regcache *regs,
8544 arm_displaced_step_copy_insn_closure *dsc)
8545{
8546 int err = 0;
8547 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8548 uint32_t insn;
8549
8550 /* Most displaced instructions use a 1-instruction scratch space, so set this
8551 here and override below if/when necessary. */
8552 dsc->numinsns = 1;
8553 dsc->insn_addr = from;
8554 dsc->scratch_base = to;
8555 dsc->cleanup = NULL;
8556 dsc->wrote_to_pc = 0;
8557
8558 if (!displaced_in_arm_mode (regs))
8559 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
8560
8561 dsc->is_thumb = 0;
8562 dsc->insn_size = 4;
8563 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8564 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
8565 (unsigned long) insn, (unsigned long) from);
8566
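  /* Dispatch on the condition field and on bits {27,26,25,4}: unconditional
     (cond == 0xf) instructions go to arm_decode_unconditional; everything
     else is switched on bits 27:25 (shifted into bits 3:1) plus bit 4.  For
     example (illustrative only), insn = 0xe59f1008, "ldr r1, [pc, #8]", has
     bits 27:25 = 010 and bit 4 = 0, giving 0x4, so it is handled by
     arm_decode_ld_st_word_ubyte.  */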
8567 if ((insn & 0xf0000000) == 0xf0000000)
8568 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8569 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8570 {
8571 case 0x0: case 0x1: case 0x2: case 0x3:
8572 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8573 break;
8574
8575 case 0x4: case 0x5: case 0x6:
8576 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8577 break;
8578
8579 case 0x7:
8580 err = arm_decode_media (gdbarch, insn, dsc);
8581 break;
8582
8583 case 0x8: case 0x9: case 0xa: case 0xb:
8584 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8585 break;
8586
8587 case 0xc: case 0xd: case 0xe: case 0xf:
8588 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
8589 break;
8590 }
8591
8592 if (err)
8593 internal_error (_("arm_process_displaced_insn: Instruction decode error"));
8594}
8595
8596/* Actually set up the scratch space for a displaced instruction. */
8597
8598void
8599arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8600 CORE_ADDR to,
8601 arm_displaced_step_copy_insn_closure *dsc)
8602{
8603 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8604 unsigned int i, len, offset;
8605 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8606 int size = dsc->is_thumb? 2 : 4;
8607 const gdb_byte *bkp_insn;
8608
8609 offset = 0;
8610 /* Poke modified instruction(s). */
8611 for (i = 0; i < dsc->numinsns; i++)
8612 {
8613 if (size == 4)
8614 displaced_debug_printf ("writing insn %.8lx at %.8lx",
8615 dsc->modinsn[i], (unsigned long) to + offset);
8616 else if (size == 2)
8617 displaced_debug_printf ("writing insn %.4x at %.8lx",
8618 (unsigned short) dsc->modinsn[i],
8619 (unsigned long) to + offset);
8620
8621 write_memory_unsigned_integer (to + offset, size,
8622 byte_order_for_code,
8623 dsc->modinsn[i]);
8624 offset += size;
8625 }
8626
8627 /* Choose the correct breakpoint instruction. */
8628 if (dsc->is_thumb)
8629 {
8630 bkp_insn = tdep->thumb_breakpoint;
8631 len = tdep->thumb_breakpoint_size;
8632 }
8633 else
8634 {
8635 bkp_insn = tdep->arm_breakpoint;
8636 len = tdep->arm_breakpoint_size;
8637 }
8638
8639 /* Put breakpoint afterwards. */
8640 write_memory (to + offset, bkp_insn, len);
8641
8642 displaced_debug_printf ("copy %s->%s", paddress (gdbarch, from),
8643 paddress (gdbarch, to));
8644}
8645
8646/* Entry point for cleaning things up after a displaced instruction has been
8647 single-stepped. */
8648
8649void
8650arm_displaced_step_fixup (struct gdbarch *gdbarch,
8651 struct displaced_step_copy_insn_closure *dsc_,
8652 CORE_ADDR from, CORE_ADDR to,
8653 struct regcache *regs)
8654{
8655 arm_displaced_step_copy_insn_closure *dsc
8656 = (arm_displaced_step_copy_insn_closure *) dsc_;
8657
8658 if (dsc->cleanup)
8659 dsc->cleanup (gdbarch, regs, dsc);
8660
8661 if (!dsc->wrote_to_pc)
8662 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8663 dsc->insn_addr + dsc->insn_size);
8664
8665}
8666
8667#include "bfd-in2.h"
8668#include "libcoff.h"
8669
8670static int
8671gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8672{
8673 gdb_disassemble_info *di
8674 = static_cast<gdb_disassemble_info *> (info->application_data);
8675 struct gdbarch *gdbarch = di->arch ();
8676
8677 if (arm_pc_is_thumb (gdbarch, memaddr))
8678 {
8679 static asymbol *asym;
8680 static combined_entry_type ce;
8681 static struct coff_symbol_struct csym;
8682 static struct bfd fake_bfd;
8683 static bfd_target fake_target;
8684
8685 if (csym.native == NULL)
8686 {
8687 /* Create a fake symbol vector containing a Thumb symbol.
8688 This is solely so that the code in print_insn_little_arm()
8689 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8690 the presence of a Thumb symbol and switch to decoding
8691 Thumb instructions. */
8692
8693 fake_target.flavour = bfd_target_coff_flavour;
8694 fake_bfd.xvec = &fake_target;
8695 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8696 csym.native = &ce;
8697 csym.symbol.the_bfd = &fake_bfd;
8698 csym.symbol.name = "fake";
8699 asym = (asymbol *) & csym;
8700 }
8701
8702 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8703 info->symbols = &asym;
8704 }
8705 else
8706 info->symbols = NULL;
8707
8708  /* GDB is able to get bfd_mach from the exe_bfd, so info->mach is
8709     accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit.  Otherwise,
8710     opcodes/arm-dis.c:print_insn would reset info->mach, and that would
8711     trigger the assert on the mismatch of info->mach and
8712     bfd_get_mach (current_program_space->exec_bfd ()) in
8713     default_print_insn.  */
8714 if (current_program_space->exec_bfd () != NULL
8715 && (current_program_space->exec_bfd ()->arch_info
8716 == gdbarch_bfd_arch_info (gdbarch)))
8717 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
8718
8719 return default_print_insn (memaddr, info);
8720}
8721
8722/* The following define instruction sequences that will cause ARM
8723   CPUs to take an undefined instruction trap.  These are used to
8724 signal a breakpoint to GDB.
8725
8726   The newer ARMv4T CPUs are capable of operating in ARM or Thumb
8727   modes.  A different instruction is required for each mode.  The ARM
8728   CPUs can also be big- or little-endian.  Thus four different
8729 instructions are needed to support all cases.
8730
8731 Note: ARMv4 defines several new instructions that will take the
8732 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8733 not in fact add the new instructions. The new undefined
8734 instructions in ARMv4 are all instructions that had no defined
8735 behaviour in earlier chips. There is no guarantee that they will
8736 raise an exception, but may be treated as NOP's. In practice, it
8737   raise an exception, but they may be treated as NOPs.  In practice, it
8738   may only be safe to rely on instructions matching:
8739 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8740 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8741 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8742
8743   Even this may only be true if the condition predicate is true.  The
8744 following use a condition predicate of ALWAYS so it is always TRUE.
8745
8746 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8747 and NetBSD all use a software interrupt rather than an undefined
8748   instruction to force a trap.  This can be handled by the
8749 abi-specific code during establishment of the gdbarch vector. */
8750
8751#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8752#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8753#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8754#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
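/* For reference (illustrative decoding): the two ARM byte sequences above
   both encode the word 0xe7ffdefe, which matches the undefined-instruction
   pattern in the comment above (with an AL condition); the Thumb sequence is
   the halfword 0xbebe, which on cores with BKPT support decodes as
   "bkpt 0x00be".  */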
8755
8756static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8757static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8758static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8759static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8760
8761/* Implement the breakpoint_kind_from_pc gdbarch method. */
8762
8763static int
8764arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
8765{
8766 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8767 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8768
8769 if (arm_pc_is_thumb (gdbarch, *pcptr))
8770 {
8771 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8772
8773 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8774 check whether we are replacing a 32-bit instruction. */
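      /* For example (illustrative only): if the halfword at *PCPTR is 0xf7ff,
	 the first half of a 32-bit BL encoding, thumb_insn_size will report 4
	 and we return ARM_BP_KIND_THUMB2 so that the 32-bit Thumb-2
	 breakpoint is used.  */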
8775 if (tdep->thumb2_breakpoint != NULL)
8776 {
8777 gdb_byte buf[2];
8778
8779 if (target_read_memory (*pcptr, buf, 2) == 0)
8780 {
8781 unsigned short inst1;
8782
8783 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8784 if (thumb_insn_size (inst1) == 4)
8785 return ARM_BP_KIND_THUMB2;
8786 }
8787 }
8788
8789 return ARM_BP_KIND_THUMB;
8790 }
8791 else
8792 return ARM_BP_KIND_ARM;
8793
8794}
8795
8796/* Implement the sw_breakpoint_from_kind gdbarch method. */
8797
8798static const gdb_byte *
8799arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
8800{
8801 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8802
8803 switch (kind)
8804 {
8805 case ARM_BP_KIND_ARM:
8806 *size = tdep->arm_breakpoint_size;
8807 return tdep->arm_breakpoint;
8808 case ARM_BP_KIND_THUMB:
8809 *size = tdep->thumb_breakpoint_size;
8810 return tdep->thumb_breakpoint;
8811 case ARM_BP_KIND_THUMB2:
8812 *size = tdep->thumb2_breakpoint_size;
8813 return tdep->thumb2_breakpoint;
8814 default:
8815 gdb_assert_not_reached ("unexpected arm breakpoint kind");
8816 }
8817}
8818
8819/* Implement the breakpoint_kind_from_current_state gdbarch method. */
8820
8821static int
8822arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
8823 struct regcache *regcache,
8824 CORE_ADDR *pcptr)
8825{
8826 gdb_byte buf[4];
8827
8828  /* Check that the memory pointed to by PC is readable.  */
8829 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
8830 {
8831 struct arm_get_next_pcs next_pcs_ctx;
8832
8833 arm_get_next_pcs_ctor (&next_pcs_ctx,
8834 &arm_get_next_pcs_ops,
8835 gdbarch_byte_order (gdbarch),
8836 gdbarch_byte_order_for_code (gdbarch),
8837 0,
8838 regcache);
8839
8840 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
8841
8842	      /* If *PCPTR matches one of the possible next instruction addresses
8843		 computed by the software single-step code above, determine the
8844		 Thumb mode from that destination address.  */
8845 for (CORE_ADDR pc : next_pcs)
8846 {
8847 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
8848 {
8849 if (IS_THUMB_ADDR (pc))
8850 {
8851 *pcptr = MAKE_THUMB_ADDR (*pcptr);
8852 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8853 }
8854 else
8855 return ARM_BP_KIND_ARM;
8856 }
8857 }
8858 }
8859
8860 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8861}
8862
8863/* Extract from the register cache REGS a function return value of
8864   type TYPE, and copy that, in virtual format, into
8865   VALBUF.  */
8866
8867static void
8868arm_extract_return_value (struct type *type, struct regcache *regs,
8869 gdb_byte *valbuf)
8870{
8871 struct gdbarch *gdbarch = regs->arch ();
8872 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8873 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8874
8875 if (TYPE_CODE_FLT == type->code ())
8876 {
8877 switch (tdep->fp_model)
8878 {
8879 case ARM_FLOAT_FPA:
8880 {
8881 /* The value is in register F0 in internal format. We need to
8882 extract the raw value and then convert it to the desired
8883 internal type. */
8884 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
8885
8886 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
8887 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
8888 valbuf, type);
8889 }
8890 break;
8891
8892 case ARM_FLOAT_SOFT_FPA:
8893 case ARM_FLOAT_SOFT_VFP:
8894 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8895 not using the VFP ABI code. */
8896 case ARM_FLOAT_VFP:
8897 regs->cooked_read (ARM_A1_REGNUM, valbuf);
8898 if (type->length () > 4)
8899 regs->cooked_read (ARM_A1_REGNUM + 1,
8900 valbuf + ARM_INT_REGISTER_SIZE);
8901 break;
8902
8903 default:
8904 internal_error (_("arm_extract_return_value: "
8905 "Floating point model not supported"));
8906 break;
8907 }
8908 }
8909 else if (type->code () == TYPE_CODE_INT
8910 || type->code () == TYPE_CODE_CHAR
8911 || type->code () == TYPE_CODE_BOOL
8912 || type->code () == TYPE_CODE_PTR
8913 || TYPE_IS_REFERENCE (type)
8914 || type->code () == TYPE_CODE_ENUM
8915 || is_fixed_point_type (type))
8916 {
8917 /* If the type is a plain integer, then the access is
8918	 straightforward.  Otherwise we have to play around a bit
8919 more. */
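      /* For example (illustrative only, little-endian): an 8-byte integer
	 return value 0x0123456789abcdef arrives with r0 = 0x89abcdef and
	 r1 = 0x01234567; the loop below copies four bytes from each of r0
	 and r1 into VALBUF.  */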
8920 int len = type->length ();
8921 int regno = ARM_A1_REGNUM;
8922 ULONGEST tmp;
8923
8924 while (len > 0)
8925 {
8926 /* By using store_unsigned_integer we avoid having to do
8927 anything special for small big-endian values. */
8928 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8929 store_unsigned_integer (valbuf,
8930 (len > ARM_INT_REGISTER_SIZE
8931 ? ARM_INT_REGISTER_SIZE : len),
8932 byte_order, tmp);
8933 len -= ARM_INT_REGISTER_SIZE;
8934 valbuf += ARM_INT_REGISTER_SIZE;
8935 }
8936 }
8937 else
8938 {
8939 /* For a structure or union the behaviour is as if the value had
8940 been stored to word-aligned memory and then loaded into
8941 registers with 32-bit load instruction(s). */
8942 int len = type->length ();
8943 int regno = ARM_A1_REGNUM;
8944 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8945
8946 while (len > 0)
8947 {
8948 regs->cooked_read (regno++, tmpbuf);
8949 memcpy (valbuf, tmpbuf,
8950 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8951 len -= ARM_INT_REGISTER_SIZE;
8952 valbuf += ARM_INT_REGISTER_SIZE;
8953 }
8954 }
8955}
8956
8957
8958/* Will a function return an aggregate type in memory or in a
8959 register? Return 0 if an aggregate type can be returned in a
8960 register, 1 if it must be returned in memory. */
8961
8962static int
8963arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8964{
8965 enum type_code code;
8966
8967 type = check_typedef (type);
8968
8969  /* Simple, non-aggregate types (i.e. not including vectors and
8970 complex) are always returned in a register (or registers). */
8971 code = type->code ();
8972 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8973 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8974 return 0;
8975
8976 if (TYPE_HAS_DYNAMIC_LENGTH (type))
8977 return 1;
8978
8979 if (TYPE_CODE_ARRAY == code && type->is_vector ())
8980 {
8981 /* Vector values should be returned using ARM registers if they
8982 are not over 16 bytes. */
8983 return (type->length () > 16);
8984 }
8985
8986 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8987 if (tdep->arm_abi != ARM_ABI_APCS)
8988 {
8989 /* The AAPCS says all aggregates not larger than a word are returned
8990 in a register. */
8991 if (type->length () <= ARM_INT_REGISTER_SIZE
8992 && language_pass_by_reference (type).trivially_copyable)
8993 return 0;
8994
8995 return 1;
8996 }
8997 else
8998 {
8999 int nRc;
9000
9001 /* All aggregate types that won't fit in a register must be returned
9002 in memory. */
9003 if (type->length () > ARM_INT_REGISTER_SIZE
9004 || !language_pass_by_reference (type).trivially_copyable)
9005 return 1;
9006
9007 /* In the ARM ABI, "integer" like aggregate types are returned in
9008 registers. For an aggregate type to be integer like, its size
9009 must be less than or equal to ARM_INT_REGISTER_SIZE and the
9010 offset of each addressable subfield must be zero. Note that bit
9011 fields are not addressable, and all addressable subfields of
9012 unions always start at offset zero.
9013
9014 This function is based on the behaviour of GCC 2.95.1.
9015 See: gcc/arm.c: arm_return_in_memory() for details.
9016
9017	 Note: GCC versions before 2.95.2 do not set up the parameters
9018	 correctly for a function returning a structure such as
9019	 struct { float f; }; this should be returned in memory,
9020 not a register. Richard Earnshaw sent me a patch, but I do not
9021 know of any way to detect if a function like the above has been
9022 compiled with the correct calling convention. */
9023
9024 /* Assume all other aggregate types can be returned in a register.
9025 Run a check for structures, unions and arrays. */
9026 nRc = 0;
9027
9028 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9029 {
9030 int i;
9031 /* Need to check if this struct/union is "integer" like. For
9032 this to be true, its size must be less than or equal to
9033 ARM_INT_REGISTER_SIZE and the offset of each addressable
9034 subfield must be zero. Note that bit fields are not
9035 addressable, and unions always start at offset zero. If any
9036 of the subfields is a floating point type, the struct/union
9037 cannot be an integer type. */
9038
9039 /* For each field in the object, check:
9040 1) Is it FP? --> yes, nRc = 1;
9041 2) Is it addressable (bitpos != 0) and
9042 not packed (bitsize == 0)?
9043 --> yes, nRc = 1
9044 */
9045
9046 for (i = 0; i < type->num_fields (); i++)
9047 {
9048 enum type_code field_type_code;
9049
9050 field_type_code
9051 = check_typedef (type->field (i).type ())->code ();
9052
9053 /* Is it a floating point type field? */
9054 if (field_type_code == TYPE_CODE_FLT)
9055 {
9056 nRc = 1;
9057 break;
9058 }
9059
9060	      /* If bitpos != 0, then we have to take a closer look at this field.  */
9061 if (type->field (i).loc_bitpos () != 0)
9062 {
9063 /* Bitfields are not addressable. If the field bitsize is
9064 zero, then the field is not packed. Hence it cannot be
9065 a bitfield or any other packed type. */
9066 if (TYPE_FIELD_BITSIZE (type, i) == 0)
9067 {
9068 nRc = 1;
9069 break;
9070 }
9071 }
9072 }
9073 }
9074
9075 return nRc;
9076 }
9077}
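
/* As a worked illustration of the APCS classification above (the types
   below are hypothetical, chosen only for the example):

     struct { char c; }                  - single field at offset 0;
                                           returned in r0.
     struct { int a : 8; int b : 24; }   - bit fields only; returned
                                           in r0.
     struct { float f; }                 - contains a float field, so
                                           nRc = 1; returned in memory.
     struct { char a; char b; }          - "b" is addressable at a
                                           non-zero offset; returned
                                           in memory.

   Anything larger than ARM_INT_REGISTER_SIZE, or not trivially
   copyable, is returned in memory regardless.  */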
9078
9079/* Write into appropriate registers a function return value of type
9080 TYPE, given in virtual format. */
9081
9082static void
9083arm_store_return_value (struct type *type, struct regcache *regs,
9084 const gdb_byte *valbuf)
9085{
9086 struct gdbarch *gdbarch = regs->arch ();
9087 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9088
9089 if (type->code () == TYPE_CODE_FLT)
9090 {
9091 gdb_byte buf[ARM_FP_REGISTER_SIZE];
9092 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9093
9094 switch (tdep->fp_model)
9095 {
9096 case ARM_FLOAT_FPA:
9097
9098 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
9099 regs->cooked_write (ARM_F0_REGNUM, buf);
9100 break;
9101
9102 case ARM_FLOAT_SOFT_FPA:
9103 case ARM_FLOAT_SOFT_VFP:
9104 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9105 not using the VFP ABI code. */
9106 case ARM_FLOAT_VFP:
9107 regs->cooked_write (ARM_A1_REGNUM, valbuf);
9108 if (type->length () > 4)
9109 regs->cooked_write (ARM_A1_REGNUM + 1,
9110 valbuf + ARM_INT_REGISTER_SIZE);
9111 break;
9112
9113 default:
9114 internal_error (_("arm_store_return_value: Floating "
9115 "point model not supported"));
9116 break;
9117 }
9118 }
9119 else if (type->code () == TYPE_CODE_INT
9120 || type->code () == TYPE_CODE_CHAR
9121 || type->code () == TYPE_CODE_BOOL
9122 || type->code () == TYPE_CODE_PTR
9123 || TYPE_IS_REFERENCE (type)
9124 || type->code () == TYPE_CODE_ENUM)
9125 {
9126 if (type->length () <= 4)
9127 {
9128 /* Values of one word or less are zero/sign-extended and
9129 returned in r0. */
9130 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
9131 LONGEST val = unpack_long (type, valbuf);
9132
9133 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
9134 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
9135 }
9136 else
9137 {
9138 /* Integral values greater than one word are stored in consecutive
9139 registers starting with r0. This will always be a multiple of
9140	     the register size.  */
9141 int len = type->length ();
9142 int regno = ARM_A1_REGNUM;
9143
9144 while (len > 0)
9145 {
9146 regs->cooked_write (regno++, valbuf);
9147 len -= ARM_INT_REGISTER_SIZE;
9148 valbuf += ARM_INT_REGISTER_SIZE;
9149 }
9150 }
9151 }
9152 else
9153 {
9154 /* For a structure or union the behaviour is as if the value had
9155 been stored to word-aligned memory and then loaded into
9156 registers with 32-bit load instruction(s). */
9157 int len = type->length ();
9158 int regno = ARM_A1_REGNUM;
9159 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
9160
9161 while (len > 0)
9162 {
9163 memcpy (tmpbuf, valbuf,
9164 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
9165 regs->cooked_write (regno++, tmpbuf);
9166 len -= ARM_INT_REGISTER_SIZE;
9167 valbuf += ARM_INT_REGISTER_SIZE;
9168 }
9169 }
9170}
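
/* Worked example for the struct/union branch above (the type is
   hypothetical, for illustration only): a 6-byte struct { short a, b, c; }
   is treated as if stored to word-aligned memory and then copied into
   core registers one word at a time:

     bytes 0-3 of the value  -> r0 (ARM_A1_REGNUM)
     bytes 4-5 of the value  -> r1; the remaining bytes written to r1
                                come from the uninitialised tail of
                                TMPBUF and carry no meaning.

   arm_extract_return_value uses the mirror-image loop, so reads and
   writes of such a value stay symmetric.  */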
9171
9172
9173/* Handle function return values. */
9174
9175static enum return_value_convention
9176arm_return_value (struct gdbarch *gdbarch, struct value *function,
9177 struct type *valtype, struct regcache *regcache,
9178 struct value **read_value, const gdb_byte *writebuf)
9179{
9180 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9181 struct type *func_type = function ? function->type () : NULL;
9182 enum arm_vfp_cprc_base_type vfp_base_type;
9183 int vfp_base_count;
9184
9185 if (arm_vfp_abi_for_function (gdbarch, func_type)
9186 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9187 {
9188 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9189 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9190 int i;
9191
9192 gdb_byte *readbuf = nullptr;
9193 if (read_value != nullptr)
9194 {
9195 *read_value = value::allocate (valtype);
9196 readbuf = (*read_value)->contents_raw ().data ();
9197 }
9198
9199 for (i = 0; i < vfp_base_count; i++)
9200 {
9201 if (reg_char == 'q')
9202 {
9203 if (writebuf)
9204 arm_neon_quad_write (gdbarch, regcache, i,
9205 writebuf + i * unit_length);
9206
9207 if (readbuf)
9208 arm_neon_quad_read (gdbarch, regcache, i,
9209 readbuf + i * unit_length);
9210 }
9211 else
9212 {
9213 char name_buf[4];
9214 int regnum;
9215
9216 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9217 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9218 strlen (name_buf));
9219 if (writebuf)
9220 regcache->cooked_write (regnum, writebuf + i * unit_length);
9221 if (readbuf)
9222 regcache->cooked_read (regnum, readbuf + i * unit_length);
9223 }
9224 }
9225 return RETURN_VALUE_REGISTER_CONVENTION;
9226 }
9227
9228 if (valtype->code () == TYPE_CODE_STRUCT
9229 || valtype->code () == TYPE_CODE_UNION
9230 || valtype->code () == TYPE_CODE_ARRAY)
9231 {
9232 /* From the AAPCS document:
9233
9234 Result return:
9235
9236 A Composite Type larger than 4 bytes, or whose size cannot be
9237 determined statically by both caller and callee, is stored in memory
9238 at an address passed as an extra argument when the function was
9239 called (Parameter Passing, rule A.4). The memory to be used for the
9240 result may be modified at any point during the function call.
9241
9242 Parameter Passing:
9243
9244 A.4: If the subroutine is a function that returns a result in memory,
9245 then the address for the result is placed in r0 and the NCRN is set
9246 to r1. */
9247 if (tdep->struct_return == pcc_struct_return
9248 || arm_return_in_memory (gdbarch, valtype))
9249 {
9250 if (read_value != nullptr)
9251 {
9252 CORE_ADDR addr;
9253
9254 regcache->cooked_read (ARM_A1_REGNUM, &addr);
9255 *read_value = value_at_non_lval (valtype, addr);
9256 }
9257 return RETURN_VALUE_ABI_RETURNS_ADDRESS;
9258 }
9259 }
9260 else if (valtype->code () == TYPE_CODE_COMPLEX)
9261 {
9262 if (arm_return_in_memory (gdbarch, valtype))
9263 return RETURN_VALUE_STRUCT_CONVENTION;
9264 }
9265
9266 if (writebuf)
9267 arm_store_return_value (valtype, regcache, writebuf);
9268
9269 if (read_value != nullptr)
9270 {
9271 *read_value = value::allocate (valtype);
9272 gdb_byte *readbuf = (*read_value)->contents_raw ().data ();
9273 arm_extract_return_value (valtype, regcache, readbuf);
9274 }
9275
9276 return RETURN_VALUE_REGISTER_CONVENTION;
9277}
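
/* Worked example for the VFP call-candidate path above (the type is
   hypothetical, for illustration only): under the VFP variant of the
   AAPCS, struct { float x, y, z; } is a homogeneous aggregate whose
   base register letter is 's' and whose vfp_base_count is 3, so its
   members are read from or written to s0, s1 and s2 (looked up by name
   through user_reg_map_name_to_regnum) rather than passed back through
   r0-r3 or memory.  */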
9278
9279
9280static int
9281arm_get_longjmp_target (frame_info_ptr frame, CORE_ADDR *pc)
9282{
9283 struct gdbarch *gdbarch = get_frame_arch (frame);
9284 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9285 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9286 CORE_ADDR jb_addr;
9287 gdb_byte buf[ARM_INT_REGISTER_SIZE];
9288
9289 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9290
9291 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9292 ARM_INT_REGISTER_SIZE))
9293 return 0;
9294
9295 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
9296 return 1;
9297}
9298/* A call to cmse secure entry function "foo" at "a" is modified by
9299 GNU ld as "b".
9300 a) bl xxxx <foo>
9301
9302 <foo>
9303 xxxx:
9304
9305 b) bl yyyy <__acle_se_foo>
9306
9307 section .gnu.sgstubs:
9308 <foo>
9309 yyyy: sg // secure gateway
9310 b.w xxxx <__acle_se_foo> // original_branch_dest
9311
9312 <__acle_se_foo>
9313 xxxx:
9314
9315 When control is at "b", the pc contains "yyyy" (the sg address), which is a
9316 trampoline and does not exist in the source code.  This function returns the
9317 target pc "xxxx". For more details please refer to section 5.4
9318 (Entry functions) and section 3.4.4 (C level development flow of secure code)
9319 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
9320 document on www.developer.arm.com. */
9321
9322static CORE_ADDR
9323arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
9324{
9325 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
9326 char *target_name = (char *) alloca (target_len);
9327 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
9328
9329 struct bound_minimal_symbol minsym
9330 = lookup_minimal_symbol (target_name, NULL, objfile);
9331
9332 if (minsym.minsym != nullptr)
9333 return minsym.value_address ();
9334
9335 return 0;
9336}
9337
9338/* Return true when SEC points to ".gnu.sgstubs" section. */
9339
9340static bool
9341arm_is_sgstubs_section (struct obj_section *sec)
9342{
9343 return (sec != nullptr
9344 && sec->the_bfd_section != nullptr
9345 && sec->the_bfd_section->name != nullptr
9346 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
9347}
9348
9349/* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9350 return the target PC. Otherwise return 0. */
9351
9352CORE_ADDR
9353arm_skip_stub (frame_info_ptr frame, CORE_ADDR pc)
9354{
9355 const char *name;
9356 int namelen;
9357 CORE_ADDR start_addr;
9358
9359 /* Find the starting address and name of the function containing the PC. */
9360 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9361 {
9362 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9363 check here. */
9364 start_addr = arm_skip_bx_reg (frame, pc);
9365 if (start_addr != 0)
9366 return start_addr;
9367
9368 return 0;
9369 }
9370
9371 /* If PC is in a Thumb call or return stub, return the address of the
9372 target PC, which is in a register. The thunk functions are called
9373   _call_via_xx, where xx is the register name.  The possible names
9374 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9375 functions, named __ARM_call_via_r[0-7]. */
9376 if (startswith (name, "_call_via_")
9377 || startswith (name, "__ARM_call_via_"))
9378 {
9379 /* Use the name suffix to determine which register contains the
9380 target PC. */
9381 static const char *table[15] =
9382 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9383 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9384 };
9385 int regno;
9386 int offset = strlen (name) - 2;
9387
9388 for (regno = 0; regno <= 14; regno++)
9389 if (strcmp (&name[offset], table[regno]) == 0)
9390 return get_frame_register_unsigned (frame, regno);
9391 }
9392
9393 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9394 non-interworking calls to foo. We could decode the stubs
9395 to find the target but it's easier to use the symbol table. */
9396 namelen = strlen (name);
9397 if (name[0] == '_' && name[1] == '_'
9398 && ((namelen > 2 + strlen ("_from_thumb")
9399 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
9400 || (namelen > 2 + strlen ("_from_arm")
9401 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
9402 {
9403 char *target_name;
9404 int target_len = namelen - 2;
9405 struct bound_minimal_symbol minsym;
9406 struct objfile *objfile;
9407 struct obj_section *sec;
9408
9409 if (name[namelen - 1] == 'b')
9410 target_len -= strlen ("_from_thumb");
9411 else
9412 target_len -= strlen ("_from_arm");
9413
9414 target_name = (char *) alloca (target_len + 1);
9415 memcpy (target_name, name + 2, target_len);
9416 target_name[target_len] = '\0';
9417
9418 sec = find_pc_section (pc);
9419 objfile = (sec == NULL) ? NULL : sec->objfile;
9420 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9421 if (minsym.minsym != NULL)
9422 return minsym.value_address ();
9423 else
9424 return 0;
9425 }
9426
9427 struct obj_section *section = find_pc_section (pc);
9428
9429 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
9430 if (arm_is_sgstubs_section (section))
9431 return arm_skip_cmse_entry (pc, name, section->objfile);
9432
9433 return 0; /* not a stub */
9434}
9435
9436static void
9437arm_update_current_architecture (void)
9438{
9439 /* If the current architecture is not ARM, we have nothing to do. */
9440 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9441 return;
9442
9443 /* Update the architecture. */
9444 gdbarch_info info;
9445 if (!gdbarch_update_p (info))
9446 internal_error (_("could not update architecture"));
9447}
9448
9449static void
9450set_fp_model_sfunc (const char *args, int from_tty,
9451 struct cmd_list_element *c)
9452{
9453 int fp_model;
9454
9455 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9456 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9457 {
9458 arm_fp_model = (enum arm_float_model) fp_model;
9459 break;
9460 }
9461
9462 if (fp_model == ARM_FLOAT_LAST)
9463 internal_error (_("Invalid fp model accepted: %s."),
9464 current_fp_model);
9465
9466 arm_update_current_architecture ();
9467}
9468
9469static void
9470show_fp_model (struct ui_file *file, int from_tty,
9471 struct cmd_list_element *c, const char *value)
9472{
9473 if (arm_fp_model == ARM_FLOAT_AUTO
9474 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9475 {
9476 arm_gdbarch_tdep *tdep
9477 = gdbarch_tdep<arm_gdbarch_tdep> (target_gdbarch ());
9478
9479 gdb_printf (file, _("\
9480The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9481 fp_model_strings[tdep->fp_model]);
9482 }
9483 else
9484 gdb_printf (file, _("\
9485The current ARM floating point model is \"%s\".\n"),
9486 fp_model_strings[arm_fp_model]);
9487}
9488
9489static void
9490arm_set_abi (const char *args, int from_tty,
9491 struct cmd_list_element *c)
9492{
9493 int arm_abi;
9494
9495 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9496 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9497 {
9498 arm_abi_global = (enum arm_abi_kind) arm_abi;
9499 break;
9500 }
9501
9502 if (arm_abi == ARM_ABI_LAST)
9503 internal_error (_("Invalid ABI accepted: %s."),
9504 arm_abi_string);
9505
9506 arm_update_current_architecture ();
9507}
9508
9509static void
9510arm_show_abi (struct ui_file *file, int from_tty,
9511 struct cmd_list_element *c, const char *value)
9512{
9513 if (arm_abi_global == ARM_ABI_AUTO
9514 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9515 {
9516 arm_gdbarch_tdep *tdep
9517 = gdbarch_tdep<arm_gdbarch_tdep> (target_gdbarch ());
9518
9519 gdb_printf (file, _("\
9520The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9521 arm_abi_strings[tdep->arm_abi]);
9522 }
9523 else
9524 gdb_printf (file, _("The current ARM ABI is \"%s\".\n"),
9525 arm_abi_string);
9526}
9527
9528static void
9529arm_show_fallback_mode (struct ui_file *file, int from_tty,
9530 struct cmd_list_element *c, const char *value)
9531{
9532 gdb_printf (file,
9533 _("The current execution mode assumed "
9534 "(when symbols are unavailable) is \"%s\".\n"),
9535 arm_fallback_mode_string);
9536}
9537
9538static void
9539arm_show_force_mode (struct ui_file *file, int from_tty,
9540 struct cmd_list_element *c, const char *value)
9541{
9542 gdb_printf (file,
9543 _("The current execution mode assumed "
9544 "(even when symbols are available) is \"%s\".\n"),
9545 arm_force_mode_string);
9546}
9547
9548static void
9549arm_show_unwind_secure_frames (struct ui_file *file, int from_tty,
9550 struct cmd_list_element *c, const char *value)
9551{
9552 gdb_printf (file,
9553 _("Usage of non-secure to secure exception stack unwinding is %s.\n"),
9554 arm_unwind_secure_frames ? "on" : "off");
9555}
9556
9557/* If the user changes the register disassembly style used for "info
9558   registers" and other commands, we also have to switch the style used
9559   in opcodes for disassembly output.  This function is run by the "set
9560   arm disassembly" command, and does that.  */
9561
9562static void
9563set_disassembly_style_sfunc (const char *args, int from_tty,
9564 struct cmd_list_element *c)
9565{
9566  /* Convert the short style name into the long style name (e.g. reg-names-*)
9567 before calling the generic set_disassembler_options() function. */
9568 std::string long_name = std::string ("reg-names-") + disassembly_style;
9569 set_disassembler_options (&long_name[0]);
9570}
9571
9572static void
9573show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
9574 struct cmd_list_element *c, const char *value)
9575{
9576 struct gdbarch *gdbarch = get_current_arch ();
9577 char *options = get_disassembler_options (gdbarch);
9578 const char *style = "";
9579 int len = 0;
9580 const char *opt;
9581
9582 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
9583 if (startswith (opt, "reg-names-"))
9584 {
9585 style = &opt[strlen ("reg-names-")];
9586 len = strcspn (style, ",");
9587 }
9588
9589 gdb_printf (file, "The disassembly style is \"%.*s\".\n", len, style);
9590}
9591\f
9592/* Return the ARM register name corresponding to register I. */
9593static const char *
9594arm_register_name (struct gdbarch *gdbarch, int i)
9595{
9596 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9597
9598 if (is_s_pseudo (gdbarch, i))
9599 {
9600 static const char *const s_pseudo_names[] = {
9601 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9602 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9603 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9604 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9605 };
9606
9607 return s_pseudo_names[i - tdep->s_pseudo_base];
9608 }
9609
9610 if (is_q_pseudo (gdbarch, i))
9611 {
9612 static const char *const q_pseudo_names[] = {
9613 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9614 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9615 };
9616
9617 return q_pseudo_names[i - tdep->q_pseudo_base];
9618 }
9619
9620 if (is_mve_pseudo (gdbarch, i))
9621 return "p0";
9622
9623 /* RA_AUTH_CODE is used for unwinding only. Do not assign it a name. */
9624 if (is_pacbti_pseudo (gdbarch, i))
9625 return "";
9626
9627 if (i >= ARRAY_SIZE (arm_register_names))
9628 /* These registers are only supported on targets which supply
9629 an XML description. */
9630 return "";
9631
9632 /* Non-pseudo registers. */
9633 return arm_register_names[i];
9634}
9635
9636/* Test whether the coff symbol specific value corresponds to a Thumb
9637 function. */
9638
9639static int
9640coff_sym_is_thumb (int val)
9641{
9642 return (val == C_THUMBEXT
9643 || val == C_THUMBSTAT
9644 || val == C_THUMBEXTFUNC
9645 || val == C_THUMBSTATFUNC
9646 || val == C_THUMBLABEL);
9647}
9648
9649/* arm_coff_make_msymbol_special()
9650 arm_elf_make_msymbol_special()
9651
9652 These functions test whether the COFF or ELF symbol corresponds to
9653 an address in thumb code, and set a "special" bit in a minimal
9654 symbol to indicate that it does. */
9655
9656static void
9657arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9658{
9659 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
9660
9661 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
9662 == ST_BRANCH_TO_THUMB)
9663 MSYMBOL_SET_SPECIAL (msym);
9664}
9665
9666static void
9667arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9668{
9669 if (coff_sym_is_thumb (val))
9670 MSYMBOL_SET_SPECIAL (msym);
9671}
9672
9673static void
9674arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9675 asymbol *sym)
9676{
9677 const char *name = bfd_asymbol_name (sym);
9678 struct arm_per_bfd *data;
9679 struct arm_mapping_symbol new_map_sym;
9680
9681 gdb_assert (name[0] == '$');
9682 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9683 return;
9684
9685 data = arm_bfd_data_key.get (objfile->obfd.get ());
9686 if (data == NULL)
9687 data = arm_bfd_data_key.emplace (objfile->obfd.get (),
9688 objfile->obfd->section_count);
9689 arm_mapping_symbol_vec &map
9690 = data->section_maps[bfd_asymbol_section (sym)->index];
9691
9692 new_map_sym.value = sym->value;
9693 new_map_sym.type = name[1];
9694
9695 /* Insert at the end, the vector will be sorted on first use. */
9696 map.push_back (new_map_sym);
9697}
9698
9699static void
9700arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9701{
9702 struct gdbarch *gdbarch = regcache->arch ();
9703 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9704
9705 /* If necessary, set the T bit. */
9706 if (arm_apcs_32)
9707 {
9708 ULONGEST val, t_bit;
9709 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9710 t_bit = arm_psr_thumb_bit (gdbarch);
9711 if (arm_pc_is_thumb (gdbarch, pc))
9712 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9713 val | t_bit);
9714 else
9715 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9716 val & ~t_bit);
9717 }
9718}
9719
9720/* Read the contents of a NEON quad register, by reading from two
9721 double registers. This is used to implement the quad pseudo
9722 registers, and for argument passing in case the quad registers are
9723 missing; vectors are passed in quad registers when using the VFP
9724 ABI, even if a NEON unit is not present. REGNUM is the index of
9725 the quad register, in [0, 15]. */
9726
9727static enum register_status
9728arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9729 int regnum, gdb_byte *buf)
9730{
9731 char name_buf[4];
9732 gdb_byte reg_buf[8];
9733 int offset, double_regnum;
9734 enum register_status status;
9735
9736 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9737 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9738 strlen (name_buf));
9739
9740 /* d0 is always the least significant half of q0. */
9741 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9742 offset = 8;
9743 else
9744 offset = 0;
9745
9746 status = regcache->raw_read (double_regnum, reg_buf);
9747 if (status != REG_VALID)
9748 return status;
9749 memcpy (buf + offset, reg_buf, 8);
9750
9751 offset = 8 - offset;
9752 status = regcache->raw_read (double_regnum + 1, reg_buf);
9753 if (status != REG_VALID)
9754 return status;
9755 memcpy (buf + offset, reg_buf, 8);
9756
9757 return REG_VALID;
9758}
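
/* For instance (illustration only), reading pseudo register q1 uses
   double registers d2 and d3: on a little-endian target d2 fills
   bytes 0-7 of BUF and d3 fills bytes 8-15, while on a big-endian
   target the two halves land at the opposite offsets, so d2 remains
   the least significant half of q1 either way.  */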
9759
9760/* Read the contents of the MVE pseudo register REGNUM and store it
9761 in BUF. */
9762
9763static enum register_status
9764arm_mve_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9765 int regnum, gdb_byte *buf)
9766{
9767 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9768
9769 /* P0 is the first 16 bits of VPR. */
9770 return regcache->raw_read_part (tdep->mve_vpr_regnum, 0, 2, buf);
9771}
9772
9773static enum register_status
9774arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9775 int regnum, gdb_byte *buf)
9776{
9777 const int num_regs = gdbarch_num_regs (gdbarch);
9778 char name_buf[4];
9779 gdb_byte reg_buf[8];
9780 int offset, double_regnum;
9781 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9782
9783 gdb_assert (regnum >= num_regs);
9784
9785 if (is_q_pseudo (gdbarch, regnum))
9786 {
9787 /* Quad-precision register. */
9788 return arm_neon_quad_read (gdbarch, regcache,
9789 regnum - tdep->q_pseudo_base, buf);
9790 }
9791 else if (is_mve_pseudo (gdbarch, regnum))
9792 return arm_mve_pseudo_read (gdbarch, regcache, regnum, buf);
9793 else
9794 {
9795 enum register_status status;
9796
9797 regnum -= tdep->s_pseudo_base;
9798 /* Single-precision register. */
9799 gdb_assert (regnum < 32);
9800
9801 /* s0 is always the least significant half of d0. */
9802 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9803 offset = (regnum & 1) ? 0 : 4;
9804 else
9805 offset = (regnum & 1) ? 4 : 0;
9806
9807 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9808 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9809 strlen (name_buf));
9810
9811 status = regcache->raw_read (double_regnum, reg_buf);
9812 if (status == REG_VALID)
9813 memcpy (buf, reg_buf + offset, 4);
9814 return status;
9815 }
9816}
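
/* Worked example for the single-precision branch above (illustration
   only): reading pseudo register s5 rebases REGNUM to 5, so the code
   reads double register d2 (5 >> 1) and, on a little-endian target,
   copies bytes 4-7 of it into BUF (offset (5 & 1) ? 4 : 0).  On a
   big-endian target the offsets are swapped, which keeps s4 in the
   least significant half of d2 in both cases.  */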
9817
9818/* Store the contents of BUF to a NEON quad register, by writing to
9819 two double registers. This is used to implement the quad pseudo
9820 registers, and for argument passing in case the quad registers are
9821 missing; vectors are passed in quad registers when using the VFP
9822 ABI, even if a NEON unit is not present. REGNUM is the index
9823 of the quad register, in [0, 15]. */
9824
9825static void
9826arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9827 int regnum, const gdb_byte *buf)
9828{
9829 char name_buf[4];
9830 int offset, double_regnum;
9831
9832 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9833 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9834 strlen (name_buf));
9835
9836 /* d0 is always the least significant half of q0. */
9837 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9838 offset = 8;
9839 else
9840 offset = 0;
9841
9842 regcache->raw_write (double_regnum, buf + offset);
9843 offset = 8 - offset;
9844 regcache->raw_write (double_regnum + 1, buf + offset);
9845}
9846
9847/* Store the contents of BUF to the MVE pseudo register REGNUM. */
9848
9849static void
9850arm_mve_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9851 int regnum, const gdb_byte *buf)
9852{
9853 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9854
9855 /* P0 is the first 16 bits of VPR. */
9856 regcache->raw_write_part (tdep->mve_vpr_regnum, 0, 2, buf);
9857}
9858
9859static void
9860arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9861 int regnum, const gdb_byte *buf)
9862{
9863 const int num_regs = gdbarch_num_regs (gdbarch);
9864 char name_buf[4];
9865 gdb_byte reg_buf[8];
9866 int offset, double_regnum;
9867 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9868
9869 gdb_assert (regnum >= num_regs);
9870
9871 if (is_q_pseudo (gdbarch, regnum))
9872 {
9873 /* Quad-precision register. */
9874 arm_neon_quad_write (gdbarch, regcache,
9875 regnum - tdep->q_pseudo_base, buf);
9876 }
9877 else if (is_mve_pseudo (gdbarch, regnum))
9878 arm_mve_pseudo_write (gdbarch, regcache, regnum, buf);
9879 else
9880 {
9881 regnum -= tdep->s_pseudo_base;
9882 /* Single-precision register. */
9883 gdb_assert (regnum < 32);
9884
9885 /* s0 is always the least significant half of d0. */
9886 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9887 offset = (regnum & 1) ? 0 : 4;
9888 else
9889 offset = (regnum & 1) ? 4 : 0;
9890
9891 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9892 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9893 strlen (name_buf));
9894
9895 regcache->raw_read (double_regnum, reg_buf);
9896 memcpy (reg_buf + offset, buf, 4);
9897 regcache->raw_write (double_regnum, reg_buf);
9898 }
9899}
9900
9901static struct value *
9902value_of_arm_user_reg (frame_info_ptr frame, const void *baton)
9903{
9904 const int *reg_p = (const int *) baton;
9905 return value_of_register (*reg_p, frame);
9906}
9907\f
9908static enum gdb_osabi
9909arm_elf_osabi_sniffer (bfd *abfd)
9910{
9911 unsigned int elfosabi;
9912 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9913
9914 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9915
9916 if (elfosabi == ELFOSABI_ARM)
9917 /* GNU tools use this value. Check note sections in this case,
9918 as well. */
9919 {
9920 for (asection *sect : gdb_bfd_sections (abfd))
9921 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
9922 }
9923
9924 /* Anything else will be handled by the generic ELF sniffer. */
9925 return osabi;
9926}
9927
9928static int
9929arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9930 const struct reggroup *group)
9931{
9932  /* The FPS register's type is INT, but it belongs to float_reggroup.
9933     Besides this, the FPS register belongs to save_reggroup,
9934     restore_reggroup, and all_reggroup, of course.  */
9935 if (regnum == ARM_FPS_REGNUM)
9936 return (group == float_reggroup
9937 || group == save_reggroup
9938 || group == restore_reggroup
9939 || group == all_reggroup);
9940 else
9941 return default_register_reggroup_p (gdbarch, regnum, group);
9942}
9943
9944/* For backward-compatibility we allow two 'g' packet lengths with
9945 the remote protocol depending on whether FPA registers are
9946 supplied. M-profile targets do not have FPA registers, but some
9947 stubs already exist in the wild which use a 'g' packet which
9948 supplies them albeit with dummy values. The packet format which
9949 includes FPA registers should be considered deprecated for
9950 M-profile targets. */
9951
9952static void
9953arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9954{
9955 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9956
9957 if (tdep->is_m)
9958 {
9959 const target_desc *tdesc;
9960
9961 /* If we know from the executable this is an M-profile target,
9962 cater for remote targets whose register set layout is the
9963 same as the FPA layout. */
9964 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
9965 register_remote_g_packet_guess (gdbarch,
9966 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
9967 tdesc);
9968
9969 /* The regular M-profile layout. */
9970 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
9971 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
9972 tdesc);
9973
9974 /* M-profile plus M4F VFP. */
9975 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
9976 register_remote_g_packet_guess (gdbarch,
9977 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
9978 tdesc);
9979 /* M-profile plus MVE. */
9980 tdesc = arm_read_mprofile_description (ARM_M_TYPE_MVE);
9981 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE
9982 + ARM_VFP2_REGS_SIZE
9983 + ARM_INT_REGISTER_SIZE, tdesc);
9984
9985 /* M-profile system (stack pointers). */
9986 tdesc = arm_read_mprofile_description (ARM_M_TYPE_SYSTEM);
9987 register_remote_g_packet_guess (gdbarch, 2 * ARM_INT_REGISTER_SIZE, tdesc);
9988 }
9989
9990 /* Otherwise we don't have a useful guess. */
9991}
9992
9993/* Implement the code_of_frame_writable gdbarch method. */
9994
9995static int
9996arm_code_of_frame_writable (struct gdbarch *gdbarch, frame_info_ptr frame)
9997{
9998 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9999
10000 if (tdep->is_m && get_frame_type (frame) == SIGTRAMP_FRAME)
10001 {
10002      /* M-profile exception frames return to some magic PCs, which
10003	 aren't writable at all.  */
10004 return 0;
10005 }
10006 else
10007 return 1;
10008}
10009
10010/* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
10011   to be suffixed by a version (e.g. armv7hl).  */
10012
10013static const char *
10014arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
10015{
10016 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
10017 return "arm(v[^- ]*)?";
10018 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
10019}
10020
10021/* Implement the "get_pc_address_flags" gdbarch method. */
10022
10023static std::string
10024arm_get_pc_address_flags (frame_info_ptr frame, CORE_ADDR pc)
10025{
10026 if (get_frame_pc_masked (frame))
10027 return "PAC";
10028
10029 return "";
10030}
10031
10032/* Initialize the current architecture based on INFO. If possible,
10033 re-use an architecture from ARCHES, which is a list of
10034 architectures already created during this debugging session.
10035
10036 Called e.g. at program startup, when reading a core file, and when
10037 reading a binary file. */
10038
10039static struct gdbarch *
10040arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
10041{
10042 struct gdbarch_list *best_arch;
10043 enum arm_abi_kind arm_abi = arm_abi_global;
10044 enum arm_float_model fp_model = arm_fp_model;
10045 tdesc_arch_data_up tdesc_data;
10046 int i;
10047 bool is_m = false;
10048 bool have_sec_ext = false;
10049 int vfp_register_count = 0;
10050 bool have_s_pseudos = false, have_q_pseudos = false;
10051 bool have_wmmx_registers = false;
10052 bool have_neon = false;
10053 bool have_fpa_registers = true;
10054 const struct target_desc *tdesc = info.target_desc;
10055 bool have_vfp = false;
10056 bool have_mve = false;
10057 bool have_pacbti = false;
10058 int mve_vpr_regnum = -1;
10059 int register_count = ARM_NUM_REGS;
10060 bool have_m_profile_msp = false;
10061 int m_profile_msp_regnum = -1;
10062 int m_profile_psp_regnum = -1;
10063 int m_profile_msp_ns_regnum = -1;
10064 int m_profile_psp_ns_regnum = -1;
10065 int m_profile_msp_s_regnum = -1;
10066 int m_profile_psp_s_regnum = -1;
10067 int tls_regnum = 0;
10068
10069 /* If we have an object to base this architecture on, try to determine
10070 its ABI. */
10071
10072 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
10073 {
10074 int ei_osabi, e_flags;
10075
10076 switch (bfd_get_flavour (info.abfd))
10077 {
10078 case bfd_target_coff_flavour:
10079 /* Assume it's an old APCS-style ABI. */
10080 /* XXX WinCE? */
10081 arm_abi = ARM_ABI_APCS;
10082 break;
10083
10084 case bfd_target_elf_flavour:
10085 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
10086 e_flags = elf_elfheader (info.abfd)->e_flags;
10087
10088 if (ei_osabi == ELFOSABI_ARM)
10089 {
10090 /* GNU tools used to use this value, but do not for EABI
10091 objects. There's nowhere to tag an EABI version
10092 anyway, so assume APCS. */
10093 arm_abi = ARM_ABI_APCS;
10094 }
10095 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
10096 {
10097 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
10098
10099 switch (eabi_ver)
10100 {
10101 case EF_ARM_EABI_UNKNOWN:
10102 /* Assume GNU tools. */
10103 arm_abi = ARM_ABI_APCS;
10104 break;
10105
10106 case EF_ARM_EABI_VER4:
10107 case EF_ARM_EABI_VER5:
10108 arm_abi = ARM_ABI_AAPCS;
10109 /* EABI binaries default to VFP float ordering.
10110 They may also contain build attributes that can
10111 be used to identify if the VFP argument-passing
10112 ABI is in use. */
10113 if (fp_model == ARM_FLOAT_AUTO)
10114 {
10115#ifdef HAVE_ELF
10116 switch (bfd_elf_get_obj_attr_int (info.abfd,
10117 OBJ_ATTR_PROC,
10118 Tag_ABI_VFP_args))
10119 {
10120 case AEABI_VFP_args_base:
10121 /* "The user intended FP parameter/result
10122 passing to conform to AAPCS, base
10123 variant". */
10124 fp_model = ARM_FLOAT_SOFT_VFP;
10125 break;
10126 case AEABI_VFP_args_vfp:
10127 /* "The user intended FP parameter/result
10128 passing to conform to AAPCS, VFP
10129 variant". */
10130 fp_model = ARM_FLOAT_VFP;
10131 break;
10132 case AEABI_VFP_args_toolchain:
10133 /* "The user intended FP parameter/result
10134 passing to conform to tool chain-specific
10135 conventions" - we don't know any such
10136 conventions, so leave it as "auto". */
10137 break;
10138 case AEABI_VFP_args_compatible:
10139 /* "Code is compatible with both the base
10140 and VFP variants; the user did not permit
10141 non-variadic functions to pass FP
10142 parameters/results" - leave it as
10143 "auto". */
10144 break;
10145 default:
10146 /* Attribute value not mentioned in the
10147 November 2012 ABI, so leave it as
10148 "auto". */
10149 break;
10150 }
10151#else
10152 fp_model = ARM_FLOAT_SOFT_VFP;
10153#endif
10154 }
10155 break;
10156
10157 default:
10158 /* Leave it as "auto". */
10159 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10160 break;
10161 }
10162
10163#ifdef HAVE_ELF
10164 /* Detect M-profile programs. This only works if the
10165 executable file includes build attributes; GCC does
10166 copy them to the executable, but e.g. RealView does
10167 not. */
10168 int attr_arch
10169 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10170 Tag_CPU_arch);
10171 int attr_profile
10172 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10173 Tag_CPU_arch_profile);
10174
10175 /* GCC specifies the profile for v6-M; RealView only
10176 specifies the profile for architectures starting with
10177 V7 (as opposed to architectures with a tag
10178 numerically greater than TAG_CPU_ARCH_V7). */
10179 if (!tdesc_has_registers (tdesc)
10180 && (attr_arch == TAG_CPU_ARCH_V6_M
10181 || attr_arch == TAG_CPU_ARCH_V6S_M
10182 || attr_arch == TAG_CPU_ARCH_V7E_M
10183 || attr_arch == TAG_CPU_ARCH_V8M_BASE
10184 || attr_arch == TAG_CPU_ARCH_V8M_MAIN
10185 || attr_arch == TAG_CPU_ARCH_V8_1M_MAIN
10186 || attr_profile == 'M'))
10187 is_m = true;
10188
10189 /* Look for attributes that indicate support for ARMv8.1-m
10190 PACBTI. */
10191 if (!tdesc_has_registers (tdesc) && is_m)
10192 {
10193 int attr_pac_extension
10194 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10195 Tag_PAC_extension);
10196
10197 int attr_bti_extension
10198 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10199 Tag_BTI_extension);
10200
10201 int attr_pacret_use
10202 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10203 Tag_PACRET_use);
10204
10205 int attr_bti_use
10206 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10207 Tag_BTI_use);
10208
10209 if (attr_pac_extension != 0 || attr_bti_extension != 0
10210 || attr_pacret_use != 0 || attr_bti_use != 0)
10211 have_pacbti = true;
10212 }
10213#endif
10214 }
10215
10216 if (fp_model == ARM_FLOAT_AUTO)
10217 {
10218 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10219 {
10220 case 0:
10221 /* Leave it as "auto". Strictly speaking this case
10222 means FPA, but almost nobody uses that now, and
10223 many toolchains fail to set the appropriate bits
10224 for the floating-point model they use. */
10225 break;
10226 case EF_ARM_SOFT_FLOAT:
10227 fp_model = ARM_FLOAT_SOFT_FPA;
10228 break;
10229 case EF_ARM_VFP_FLOAT:
10230 fp_model = ARM_FLOAT_VFP;
10231 break;
10232 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10233 fp_model = ARM_FLOAT_SOFT_VFP;
10234 break;
10235 }
10236 }
10237
10238 if (e_flags & EF_ARM_BE8)
10239 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10240
10241 break;
10242
10243 default:
10244 /* Leave it as "auto". */
10245 break;
10246 }
10247 }
10248
10249 /* Check any target description for validity. */
10250 if (tdesc_has_registers (tdesc))
10251 {
10252 /* For most registers we require GDB's default names; but also allow
10253 the numeric names for sp / lr / pc, as a convenience. */
10254 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10255 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10256 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10257
10258 const struct tdesc_feature *feature;
10259 int valid_p;
10260
10261 feature = tdesc_find_feature (tdesc,
10262 "org.gnu.gdb.arm.core");
10263 if (feature == NULL)
10264 {
10265 feature = tdesc_find_feature (tdesc,
10266 "org.gnu.gdb.arm.m-profile");
10267 if (feature == NULL)
10268 return NULL;
10269 else
10270 is_m = true;
10271 }
10272
10273 tdesc_data = tdesc_data_alloc ();
10274
10275 valid_p = 1;
10276 for (i = 0; i < ARM_SP_REGNUM; i++)
10277 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10278 arm_register_names[i]);
10279 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10280 ARM_SP_REGNUM,
10281 arm_sp_names);
10282 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10283 ARM_LR_REGNUM,
10284 arm_lr_names);
10285 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10286 ARM_PC_REGNUM,
10287 arm_pc_names);
10288 if (is_m)
10289 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10290 ARM_PS_REGNUM, "xpsr");
10291 else
10292 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10293 ARM_PS_REGNUM, "cpsr");
10294
10295 if (!valid_p)
10296 return NULL;
10297
10298 if (is_m)
10299 {
10300 feature = tdesc_find_feature (tdesc,
10301 "org.gnu.gdb.arm.m-system");
10302 if (feature != nullptr)
10303 {
10304 /* MSP */
10305 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10306 register_count, "msp");
10307 if (!valid_p)
10308 {
10309 warning (_("M-profile m-system feature is missing required register msp."));
10310 return nullptr;
10311 }
10312 have_m_profile_msp = true;
10313 m_profile_msp_regnum = register_count++;
10314
10315 /* PSP */
10316 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10317 register_count, "psp");
10318 if (!valid_p)
10319 {
10320 warning (_("M-profile m-system feature is missing required register psp."));
10321 return nullptr;
10322 }
10323 m_profile_psp_regnum = register_count++;
10324 }
10325 }
10326
10327 feature = tdesc_find_feature (tdesc,
10328 "org.gnu.gdb.arm.fpa");
10329 if (feature != NULL)
10330 {
10331 valid_p = 1;
10332 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10333 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10334 arm_register_names[i]);
10335 if (!valid_p)
10336 return NULL;
10337 }
10338 else
10339 have_fpa_registers = false;
10340
10341 feature = tdesc_find_feature (tdesc,
10342 "org.gnu.gdb.xscale.iwmmxt");
10343 if (feature != NULL)
10344 {
10345 static const char *const iwmmxt_names[] = {
10346 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10347 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10348 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10349 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10350 };
10351
10352 valid_p = 1;
10353 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10354 valid_p
10355 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10356 iwmmxt_names[i - ARM_WR0_REGNUM]);
10357
10358 /* Check for the control registers, but do not fail if they
10359 are missing. */
10360 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10361 tdesc_numbered_register (feature, tdesc_data.get (), i,
10362 iwmmxt_names[i - ARM_WR0_REGNUM]);
10363
10364 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10365 valid_p
10366 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10367 iwmmxt_names[i - ARM_WR0_REGNUM]);
10368
10369 if (!valid_p)
10370 return NULL;
10371
10372 have_wmmx_registers = true;
10373 }
10374
10375 /* If we have a VFP unit, check whether the single precision registers
10376 are present. If not, then we will synthesize them as pseudo
10377 registers. */
10378 feature = tdesc_find_feature (tdesc,
10379 "org.gnu.gdb.arm.vfp");
10380 if (feature != NULL)
10381 {
10382 static const char *const vfp_double_names[] = {
10383 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10384 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10385 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10386 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10387 };
10388
10389 /* Require the double precision registers. There must be either
10390 16 or 32. */
10391 valid_p = 1;
10392 for (i = 0; i < 32; i++)
10393 {
10394 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10395 ARM_D0_REGNUM + i,
10396 vfp_double_names[i]);
10397 if (!valid_p)
10398 break;
10399 }
10400 if (!valid_p && i == 16)
10401 valid_p = 1;
10402
10403 /* Also require FPSCR. */
10404 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10405 ARM_FPSCR_REGNUM, "fpscr");
10406 if (!valid_p)
10407 return NULL;
10408
10409 have_vfp = true;
10410
10411 if (tdesc_unnumbered_register (feature, "s0") == 0)
10412 have_s_pseudos = true;
10413
10414 vfp_register_count = i;
10415
10416 /* If we have VFP, also check for NEON. The architecture allows
10417 NEON without VFP (integer vector operations only), but GDB
10418 does not support that. */
10419 feature = tdesc_find_feature (tdesc,
10420 "org.gnu.gdb.arm.neon");
10421 if (feature != NULL)
10422 {
10423 /* NEON requires 32 double-precision registers. */
10424 if (i != 32)
10425 return NULL;
10426
10427 /* If there are quad registers defined by the stub, use
10428 their type; otherwise (normally) provide them with
10429 the default type. */
10430 if (tdesc_unnumbered_register (feature, "q0") == 0)
10431 have_q_pseudos = true;
10432 }
10433 }
10434
10435 /* Check for the TLS register feature. */
10436 feature = tdesc_find_feature (tdesc, "org.gnu.gdb.arm.tls");
10437 if (feature != nullptr)
10438 {
10439 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10440 register_count, "tpidruro");
10441 if (!valid_p)
10442 return nullptr;
10443
10444 tls_regnum = register_count;
10445 register_count++;
10446 }
10447
10448      /* Check for MVE after all the checks for GPRs, VFP and Neon.
10449 MVE (Helium) is an M-profile extension. */
10450 if (is_m)
10451 {
10452 /* Do we have the MVE feature? */
10453	  feature = tdesc_find_feature (tdesc, "org.gnu.gdb.arm.m-profile-mve");
10454
10455 if (feature != nullptr)
10456 {
10457 /* If we have MVE, we must always have the VPR register. */
10458 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10459 register_count, "vpr");
10460 if (!valid_p)
10461 {
10462 warning (_("MVE feature is missing required register vpr."));
10463 return nullptr;
10464 }
10465
10466 have_mve = true;
10467 mve_vpr_regnum = register_count;
10468 register_count++;
10469
10470 /* We can't have Q pseudo registers available here, as that
10471 would mean we have NEON features, and that is only available
10472 on A and R profiles. */
10473 gdb_assert (!have_q_pseudos);
10474
10475 /* Given we have a M-profile target description, if MVE is
10476 enabled and there are VFP registers, we should have Q
10477 pseudo registers (Q0 ~ Q7). */
10478 if (have_vfp)
10479 have_q_pseudos = true;
10480 }
10481
10482 /* Do we have the ARMv8.1-m PACBTI feature? */
10483 feature = tdesc_find_feature (tdesc,
10484 "org.gnu.gdb.arm.m-profile-pacbti");
10485 if (feature != nullptr)
10486 {
10487 /* By advertising this feature, the target acknowledges the
10488 presence of the ARMv8.1-m PACBTI extensions.
10489
10490 We don't care for any particular registers in this group, so
10491 the target is free to include whatever it deems appropriate.
10492
10493 The expectation is for this feature to include the PAC
10494 keys. */
10495 have_pacbti = true;
10496 }
10497
10498 /* Do we have the Security extension? */
10499 feature = tdesc_find_feature (tdesc,
10500 "org.gnu.gdb.arm.secext");
10501 if (feature != nullptr)
10502 {
10503 /* Secure/Non-secure stack pointers. */
10504 /* MSP_NS */
10505 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10506 register_count, "msp_ns");
10507 if (!valid_p)
10508 {
10509 warning (_("M-profile secext feature is missing required register msp_ns."));
10510 return nullptr;
10511 }
10512 m_profile_msp_ns_regnum = register_count++;
10513
10514 /* PSP_NS */
10515 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10516 register_count, "psp_ns");
10517 if (!valid_p)
10518 {
10519 warning (_("M-profile secext feature is missing required register psp_ns."));
10520 return nullptr;
10521 }
10522 m_profile_psp_ns_regnum = register_count++;
10523
10524 /* MSP_S */
10525 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10526 register_count, "msp_s");
10527 if (!valid_p)
10528 {
10529 warning (_("M-profile secext feature is missing required register msp_s."));
10530 return nullptr;
10531 }
10532 m_profile_msp_s_regnum = register_count++;
10533
10534 /* PSP_S */
10535 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10536 register_count, "psp_s");
10537 if (!valid_p)
10538 {
10539 warning (_("M-profile secext feature is missing required register psp_s."));
10540 return nullptr;
10541 }
10542 m_profile_psp_s_regnum = register_count++;
10543
10544 have_sec_ext = true;
10545 }
10546
10547 }
10548 }
10549
10550 /* If there is already a candidate, use it. */
10551 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10552 best_arch != NULL;
10553 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10554 {
10555 arm_gdbarch_tdep *tdep
10556 = gdbarch_tdep<arm_gdbarch_tdep> (best_arch->gdbarch);
10557
10558 if (arm_abi != ARM_ABI_AUTO && arm_abi != tdep->arm_abi)
10559 continue;
10560
10561 if (fp_model != ARM_FLOAT_AUTO && fp_model != tdep->fp_model)
10562 continue;
10563
10564 /* There are various other properties in tdep that we do not
10565 need to check here: those derived from a target description,
10566 since gdbarches with a different target description are
10567 automatically disqualified. */
10568
10569 /* Do check is_m, though, since it might come from the binary. */
10570 if (is_m != tdep->is_m)
10571 continue;
10572
10573 /* Also check for ARMv8.1-m PACBTI support, since it might come from
10574 the binary. */
10575 if (have_pacbti != tdep->have_pacbti)
10576 continue;
10577
10578 /* Found a match. */
10579 break;
10580 }
10581
10582 if (best_arch != NULL)
10583 return best_arch->gdbarch;
10584
10585 gdbarch *gdbarch
10586 = gdbarch_alloc (&info, gdbarch_tdep_up (new arm_gdbarch_tdep));
10587 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10588
10589 /* Record additional information about the architecture we are defining.
10590 These are gdbarch discriminators, like the OSABI. */
10591 tdep->arm_abi = arm_abi;
10592 tdep->fp_model = fp_model;
10593 tdep->is_m = is_m;
10594 tdep->have_sec_ext = have_sec_ext;
10595 tdep->have_fpa_registers = have_fpa_registers;
10596 tdep->have_wmmx_registers = have_wmmx_registers;
10597 gdb_assert (vfp_register_count == 0
10598 || vfp_register_count == 16
10599 || vfp_register_count == 32);
10600 tdep->vfp_register_count = vfp_register_count;
10601 tdep->have_s_pseudos = have_s_pseudos;
10602 tdep->have_q_pseudos = have_q_pseudos;
10603 tdep->have_neon = have_neon;
10604 tdep->tls_regnum = tls_regnum;
10605
10606 /* Adjust the MVE feature settings. */
10607 if (have_mve)
10608 {
10609 tdep->have_mve = true;
10610 tdep->mve_vpr_regnum = mve_vpr_regnum;
10611 }
10612
10613 /* Adjust the PACBTI feature settings. */
10614 tdep->have_pacbti = have_pacbti;
10615
10616 /* Adjust the M-profile stack pointers settings. */
10617 if (have_m_profile_msp)
10618 {
10619 tdep->m_profile_msp_regnum = m_profile_msp_regnum;
10620 tdep->m_profile_psp_regnum = m_profile_psp_regnum;
10621 tdep->m_profile_msp_ns_regnum = m_profile_msp_ns_regnum;
10622 tdep->m_profile_psp_ns_regnum = m_profile_psp_ns_regnum;
10623 tdep->m_profile_msp_s_regnum = m_profile_msp_s_regnum;
10624 tdep->m_profile_psp_s_regnum = m_profile_psp_s_regnum;
10625 }
10626
10627 arm_register_g_packet_guesses (gdbarch);
10628
10629 /* Breakpoints. */
10630 switch (info.byte_order_for_code)
10631 {
10632 case BFD_ENDIAN_BIG:
10633 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10634 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10635 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10636 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10637
10638 break;
10639
10640 case BFD_ENDIAN_LITTLE:
10641 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10642 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10643 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10644 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10645
10646 break;
10647
10648 default:
10649 internal_error (_("arm_gdbarch_init: bad byte order for float format"));
10650 }
10651
10652 /* On ARM targets char defaults to unsigned. */
10653 set_gdbarch_char_signed (gdbarch, 0);
10654
10655 /* wchar_t is unsigned under the AAPCS. */
10656 if (tdep->arm_abi == ARM_ABI_AAPCS)
10657 set_gdbarch_wchar_signed (gdbarch, 0);
10658 else
10659 set_gdbarch_wchar_signed (gdbarch, 1);
10660
10661 /* Compute type alignment. */
10662 set_gdbarch_type_align (gdbarch, arm_type_align);
10663
10664 /* Note: for displaced stepping, this includes the breakpoint, and one word
10665     of additional scratch space.  This setting isn't used for anything besides
10666 displaced stepping at present. */
10667 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
10668
10669 /* This should be low enough for everything. */
10670 tdep->lowest_pc = 0x20;
10671 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10672
10673 /* The default, for both APCS and AAPCS, is to return small
10674 structures in registers. */
10675 tdep->struct_return = reg_struct_return;
10676
10677 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10678 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10679
10680 if (is_m)
10681 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
10682
10683 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10684
10685 frame_base_set_default (gdbarch, &arm_normal_base);
10686
10687 /* Address manipulation. */
10688 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10689
10690 /* Advance PC across function entry code. */
10691 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10692
10693 /* Detect whether PC is at a point where the stack has been destroyed. */
10694 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
10695
10696 /* Skip trampolines. */
10697 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10698
10699 /* The stack grows downward. */
10700 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10701
10702 /* Breakpoint manipulation. */
10703 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
10704 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
10705 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
10706 arm_breakpoint_kind_from_current_state);
10707
10708 /* Information about registers, etc. */
10709 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10710 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10711 set_gdbarch_num_regs (gdbarch, register_count);
10712 set_gdbarch_register_type (gdbarch, arm_register_type);
10713 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10714
10715 /* This "info float" is FPA-specific. Use the generic version if we
10716 do not have FPA. */
10717 if (tdep->have_fpa_registers)
10718 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10719
10720 /* Internal <-> external register number maps. */
10721 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10722 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10723
10724 set_gdbarch_register_name (gdbarch, arm_register_name);
10725
10726 /* Returning results. */
10727 set_gdbarch_return_value_as_value (gdbarch, arm_return_value);
10728
10729 /* Disassembly. */
10730 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10731
10732 /* Minsymbol frobbing. */
10733 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10734 set_gdbarch_coff_make_msymbol_special (gdbarch,
10735 arm_coff_make_msymbol_special);
10736 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10737
10738 /* Thumb-2 IT block support. */
10739 set_gdbarch_adjust_breakpoint_address (gdbarch,
10740 arm_adjust_breakpoint_address);
10741
10742 /* Virtual tables. */
10743 set_gdbarch_vbit_in_delta (gdbarch, 1);
10744
10745 /* Hook in the ABI-specific overrides, if they have been registered. */
10746 gdbarch_init_osabi (info, gdbarch);
10747
10748 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10749
10750 /* Add some default predicates. */
10751 if (is_m)
10752 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10753 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10754 dwarf2_append_unwinders (gdbarch);
10755 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10756 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
10757 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10758
10759 /* Now we have tuned the configuration, set a few final things,
10760 based on what the OS ABI has told us. */
10761
10762 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10763 binaries are always marked. */
10764 if (tdep->arm_abi == ARM_ABI_AUTO)
10765 tdep->arm_abi = ARM_ABI_APCS;
10766
10767 /* Watchpoints are not steppable. */
10768 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10769
10770 /* We used to default to FPA for generic ARM, but almost nobody
10771 uses that now, and we now provide a way for the user to force
10772 the model. So default to the most useful variant. */
10773 if (tdep->fp_model == ARM_FLOAT_AUTO)
10774 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10775
10776 if (tdep->jb_pc >= 0)
10777 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10778
10779 /* Floating point sizes and format. */
10780 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10781 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10782 {
10783 set_gdbarch_double_format
10784 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10785 set_gdbarch_long_double_format
10786 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10787 }
10788 else
10789 {
10790 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10791 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10792 }
10793
10794 /* Hook used to decorate frames with signed return addresses, only available
10795 for ARMv8.1-m PACBTI. */
10796 if (is_m && have_pacbti)
10797 set_gdbarch_get_pc_address_flags (gdbarch, arm_get_pc_address_flags);
10798
10799 if (tdesc_data != nullptr)
10800 {
10801 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10802
10803 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
10804 register_count = gdbarch_num_regs (gdbarch);
10805
10806 /* Override tdesc_register_type to adjust the types of VFP
10807 registers for NEON. */
10808 set_gdbarch_register_type (gdbarch, arm_register_type);
10809 }
10810
10811 /* Initialize the pseudo register data. */
10812 int num_pseudos = 0;
10813 if (tdep->have_s_pseudos)
10814 {
10815 /* VFP single precision pseudo registers (S0~S31). */
10816 tdep->s_pseudo_base = register_count;
10817 tdep->s_pseudo_count = 32;
10818 num_pseudos += tdep->s_pseudo_count;
10819
10820 if (tdep->have_q_pseudos)
10821 {
10822 /* NEON quad precision pseudo registers (Q0~Q15). */
10823 tdep->q_pseudo_base = register_count + num_pseudos;
10824
10825 if (have_neon)
10826 tdep->q_pseudo_count = 16;
10827 else if (have_mve)
10828 tdep->q_pseudo_count = ARM_MVE_NUM_Q_REGS;
10829
10830 num_pseudos += tdep->q_pseudo_count;
10831 }
10832 }
10833
10834 /* Do we have any MVE pseudo registers? */
10835 if (have_mve)
10836 {
10837 tdep->mve_pseudo_base = register_count + num_pseudos;
10838 tdep->mve_pseudo_count = 1;
10839 num_pseudos += tdep->mve_pseudo_count;
10840 }
10841
10842 /* Do we have any ARMv8.1-m PACBTI pseudo registers? */
10843 if (have_pacbti)
10844 {
10845 tdep->pacbti_pseudo_base = register_count + num_pseudos;
10846 tdep->pacbti_pseudo_count = 1;
10847 num_pseudos += tdep->pacbti_pseudo_count;
10848 }
10849
10850 /* Set some pseudo register hooks, if we have pseudo registers. */
10851 if (tdep->have_s_pseudos || have_mve || have_pacbti)
10852 {
10853 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10854 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10855 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10856 }
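 /* Note: the *_pseudo_base values assigned above start at REGISTER_COUNT,
 which at this point is the number of raw registers (refreshed from
 gdbarch_num_regs after tdesc_use_registers when a target description is
 in use), so the pseudo registers are numbered contiguously after the raw
 registers. */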
10857
10858 /* Add standard register aliases. We add aliases even for those
10859 names which are used by the current architecture - it's simpler,
10860 and does no harm, since nothing ever lists user registers. */
10861 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10862 user_reg_add (gdbarch, arm_register_aliases[i].name,
10863 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10864
10865 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
10866 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
10867
10868 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
10869
10870 return gdbarch;
10871}
10872
10873static void
10874arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10875{
10876 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10877
10878 if (tdep == NULL)
10879 return;
10880
10881 gdb_printf (file, _("arm_dump_tdep: fp_model = %i\n"),
10882 (int) tdep->fp_model);
10883 gdb_printf (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
10884 (int) tdep->have_fpa_registers);
10885 gdb_printf (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
10886 (int) tdep->have_wmmx_registers);
10887 gdb_printf (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
10888 (int) tdep->vfp_register_count);
10889 gdb_printf (file, _("arm_dump_tdep: have_s_pseudos = %s\n"),
10890 tdep->have_s_pseudos? "true" : "false");
10891 gdb_printf (file, _("arm_dump_tdep: s_pseudo_base = %i\n"),
10892 (int) tdep->s_pseudo_base);
10893 gdb_printf (file, _("arm_dump_tdep: s_pseudo_count = %i\n"),
10894 (int) tdep->s_pseudo_count);
10895 gdb_printf (file, _("arm_dump_tdep: have_q_pseudos = %s\n"),
10896 tdep->have_q_pseudos? "true" : "false");
10897 gdb_printf (file, _("arm_dump_tdep: q_pseudo_base = %i\n"),
10898 (int) tdep->q_pseudo_base);
10899 gdb_printf (file, _("arm_dump_tdep: q_pseudo_count = %i\n"),
10900 (int) tdep->q_pseudo_count);
10901 gdb_printf (file, _("arm_dump_tdep: have_neon = %i\n"),
10902 (int) tdep->have_neon);
10903 gdb_printf (file, _("arm_dump_tdep: have_mve = %s\n"),
10904 tdep->have_mve? "yes" : "no");
10905 gdb_printf (file, _("arm_dump_tdep: mve_vpr_regnum = %i\n"),
10906 tdep->mve_vpr_regnum);
10907 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_base = %i\n"),
10908 tdep->mve_pseudo_base);
10909 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_count = %i\n"),
10910 tdep->mve_pseudo_count);
10911 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_regnum = %i\n"),
10912 tdep->m_profile_msp_regnum);
10913 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_regnum = %i\n"),
10914 tdep->m_profile_psp_regnum);
10915 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_ns_regnum = %i\n"),
10916 tdep->m_profile_msp_ns_regnum);
10917 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_ns_regnum = %i\n"),
10918 tdep->m_profile_psp_ns_regnum);
10919 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_s_regnum = %i\n"),
10920 tdep->m_profile_msp_s_regnum);
10921 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_s_regnum = %i\n"),
10922 tdep->m_profile_psp_s_regnum);
10923 gdb_printf (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
10924 (unsigned long) tdep->lowest_pc);
10925 gdb_printf (file, _("arm_dump_tdep: have_pacbti = %s\n"),
10926 tdep->have_pacbti? "yes" : "no");
10927 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_base = %i\n"),
10928 tdep->pacbti_pseudo_base);
10929 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_count = %i\n"),
10930 tdep->pacbti_pseudo_count);
10931 gdb_printf (file, _("arm_dump_tdep: is_m = %s\n"),
10932 tdep->is_m? "yes" : "no");
10933}
10934
10935#if GDB_SELF_TEST
10936namespace selftests
10937{
10938static void arm_record_test (void);
10939static void arm_analyze_prologue_test ();
10940}
10941#endif
10942
10943void _initialize_arm_tdep ();
10944void
10945_initialize_arm_tdep ()
10946{
10947 long length;
10948 int i, j;
10949 char regdesc[1024], *rdptr = regdesc;
10950 size_t rest = sizeof (regdesc);
10951
10952 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10953
10954 /* Add ourselves to objfile event chain. */
10955 gdb::observers::new_objfile.attach (arm_exidx_new_objfile, "arm-tdep");
10956
10957 /* Register an ELF OS ABI sniffer for ARM binaries. */
10958 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10959 bfd_target_elf_flavour,
10960 arm_elf_osabi_sniffer);
10961
10962 /* Add root prefix command for all "set arm"/"show arm" commands. */
10963 add_setshow_prefix_cmd ("arm", no_class,
10964 _("Various ARM-specific commands."),
10965 _("Various ARM-specific commands."),
10966 &setarmcmdlist, &showarmcmdlist,
10967 &setlist, &showlist);
10968
10969 arm_disassembler_options = xstrdup ("reg-names-std");
10970 const disasm_options_t *disasm_options
10971 = &disassembler_options_arm ()->options;
10972 int num_disassembly_styles = 0;
10973 for (i = 0; disasm_options->name[i] != NULL; i++)
10974 if (startswith (disasm_options->name[i], "reg-names-"))
10975 num_disassembly_styles++;
10976
10977 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
10978 valid_disassembly_styles = XNEWVEC (const char *,
10979 num_disassembly_styles + 1);
10980 for (i = j = 0; disasm_options->name[i] != NULL; i++)
10981 if (startswith (disasm_options->name[i], "reg-names-"))
10982 {
10983 size_t offset = strlen ("reg-names-");
10984 const char *style = disasm_options->name[i];
10985 valid_disassembly_styles[j++] = &style[offset];
10986 if (strcmp (&style[offset], "std") == 0)
10987 disassembly_style = &style[offset];
10988 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
10989 disasm_options->description[i]);
10990 rdptr += length;
10991 rest -= length;
10992 }
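 /* REGDESC now holds one "style - description" line for each reg-names-*
 disassembler option; it is spliced into the help text for the
 "set arm disassembler" command below. */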
10993 /* Mark the end of valid options. */
10994 valid_disassembly_styles[num_disassembly_styles] = NULL;
10995
10996 /* Create the help text. */
10997 std::string helptext = string_printf ("%s%s%s",
10998 _("The valid values are:\n"),
10999 regdesc,
11000 _("The default is \"std\"."));
11001
11002 add_setshow_enum_cmd ("disassembler", no_class,
11003 valid_disassembly_styles, &disassembly_style,
11004 _("Set the disassembly style."),
11005 _("Show the disassembly style."),
11006 helptext.c_str (),
11007 set_disassembly_style_sfunc,
11008 show_disassembly_style_sfunc,
11009 &setarmcmdlist, &showarmcmdlist);
11010
11011 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
11012 _("Set usage of ARM 32-bit mode."),
11013 _("Show usage of ARM 32-bit mode."),
11014 _("When off, a 26-bit PC will be used."),
11015 NULL,
11016 NULL, /* FIXME: i18n: Usage of ARM 32-bit
11017 mode is %s. */
11018 &setarmcmdlist, &showarmcmdlist);
11019
11020 /* Add a command to allow the user to force the FPU model. */
11021 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
11022 _("Set the floating point type."),
11023 _("Show the floating point type."),
11024 _("auto - Determine the FP typefrom the OS-ABI.\n\
11025softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
11026fpa - FPA co-processor (GCC compiled).\n\
11027softvfp - Software FP with pure-endian doubles.\n\
11028vfp - VFP co-processor."),
11029 set_fp_model_sfunc, show_fp_model,
11030 &setarmcmdlist, &showarmcmdlist);
11031
11032 /* Add a command to allow the user to force the ABI. */
11033 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
11034 _("Set the ABI."),
11035 _("Show the ABI."),
11036 NULL, arm_set_abi, arm_show_abi,
11037 &setarmcmdlist, &showarmcmdlist);
11038
11039 /* Add two commands to allow the user to force the assumed
11040 execution mode. */
11041 add_setshow_enum_cmd ("fallback-mode", class_support,
11042 arm_mode_strings, &arm_fallback_mode_string,
11043 _("Set the mode assumed when symbols are unavailable."),
11044 _("Show the mode assumed when symbols are unavailable."),
11045 NULL, NULL, arm_show_fallback_mode,
11046 &setarmcmdlist, &showarmcmdlist);
11047 add_setshow_enum_cmd ("force-mode", class_support,
11048 arm_mode_strings, &arm_force_mode_string,
11049 _("Set the mode assumed even when symbols are available."),
11050 _("Show the mode assumed even when symbols are available."),
11051 NULL, NULL, arm_show_force_mode,
11052 &setarmcmdlist, &showarmcmdlist);
11053
11054 /* Add a command to stop triggering security exceptions when
11055 unwinding exception stacks. */
11056 add_setshow_boolean_cmd ("unwind-secure-frames", no_class, &arm_unwind_secure_frames,
11057 _("Set usage of non-secure to secure exception stack unwinding."),
11058 _("Show usage of non-secure to secure exception stack unwinding."),
11059 _("When on, the debugger can trigger memory access traps."),
11060 NULL, arm_show_unwind_secure_frames,
11061 &setarmcmdlist, &showarmcmdlist);
11062
11063 /* Debugging flag. */
11064 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
11065 _("Set ARM debugging."),
11066 _("Show ARM debugging."),
11067 _("When on, arm-specific debugging is enabled."),
11068 NULL,
11069 NULL, /* FIXME: i18n: ARM debugging is %s. */
11070 &setdebuglist, &showdebuglist);
11071
11072#if GDB_SELF_TEST
11073 selftests::register_test ("arm-record", selftests::arm_record_test);
11074 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test);
11075#endif
11076
11077}
11078
11079/* ARM-reversible process record data structures. */
11080
11081#define ARM_INSN_SIZE_BYTES 4
11082#define THUMB_INSN_SIZE_BYTES 2
11083#define THUMB2_INSN_SIZE_BYTES 4
11084
11085
11086/* Position of the bit within a 32-bit ARM instruction
11087 that defines whether the instruction is a load or store. */
11088#define INSN_S_L_BIT_NUM 20
11089
11090#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
11091 do \
11092 { \
11093 unsigned int reg_len = LENGTH; \
11094 if (reg_len) \
11095 { \
11096 REGS = XNEWVEC (uint32_t, reg_len); \
11097 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
11098 } \
11099 } \
11100 while (0)
11101
11102#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
11103 do \
11104 { \
11105 unsigned int mem_len = LENGTH; \
11106 if (mem_len) \
11107 { \
11108 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
11109 memcpy(&MEMS->len, &RECORD_BUF[0], \
11110 sizeof(struct arm_mem_r) * LENGTH); \
11111 } \
11112 } \
11113 while (0)
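/* For MEM_ALLOC the RECORD_BUF is laid out as consecutive (length, address)
 uint32_t pairs, matching the layout of struct arm_mem_r below, so a single
 memcpy transfers LENGTH records. */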
11114
11115 /* Checks whether the insn has already been recorded (boolean expression). */
11116#define INSN_RECORDED(ARM_RECORD) \
11117 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
11118
11119/* ARM memory record structure. */
11120struct arm_mem_r
11121{
11122 uint32_t len; /* Record length. */
11123 uint32_t addr; /* Memory address. */
11124};
11125
11126/* ARM instruction record contains opcode of current insn
11127 and execution state (before entry to decode_insn()),
11128 contains list of to-be-modified registers and
11129 memory blocks (on return from decode_insn()). */
11130
11131struct arm_insn_decode_record
11132{
11133 struct gdbarch *gdbarch;
11134 struct regcache *regcache;
11135 CORE_ADDR this_addr; /* Address of the insn being decoded. */
11136 uint32_t arm_insn; /* Should accommodate thumb. */
11137 uint32_t cond; /* Condition code. */
11138 uint32_t opcode; /* Insn opcode. */
11139 uint32_t decode; /* Insn decode bits. */
11140 uint32_t mem_rec_count; /* No of mem records. */
11141 uint32_t reg_rec_count; /* No of reg records. */
11142 uint32_t *arm_regs; /* Registers to be saved for this record. */
11143 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
11144};
11145
11146
11147/* Checks ARM SBZ and SBO mandatory fields. */
11148
11149static int
11150sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
11151{
11152 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
11153
11154 if (!len)
11155 return 1;
11156
11157 if (!sbo)
11158 ones = ~ones;
11159
11160 while (ones)
11161 {
11162 if (!(ones & sbo))
11163 {
11164 return 0;
11165 }
11166 ones = ones >> 1;
11167 }
11168 return 1;
11169}
11170
11171enum arm_record_result
11172{
11173 ARM_RECORD_SUCCESS = 0,
11174 ARM_RECORD_FAILURE = 1
11175};
11176
11177enum arm_record_strx_t
11178{
11179 ARM_RECORD_STRH=1,
11180 ARM_RECORD_STRD
11181};
11182
11183enum record_type_t
11184{
11185 ARM_RECORD=1,
11186 THUMB_RECORD,
11187 THUMB2_RECORD
11188};
11189
11190
11191static int
11192arm_record_strx (arm_insn_decode_record *arm_insn_r, uint32_t *record_buf,
11193 uint32_t *record_buf_mem, arm_record_strx_t str_type)
11194{
11195
11196 struct regcache *reg_cache = arm_insn_r->regcache;
11197 ULONGEST u_regval[2]= {0};
11198
11199 uint32_t reg_src1 = 0, reg_src2 = 0;
11200 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11201
11202 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11203 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11204
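 /* OPCODE now holds insn bits 21-24: the W (writeback), immediate,
 U (add/subtract) and P (pre/post-index) flags, so the values tested
 below select the addressing mode of these extra load/store encodings. */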
11205 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11206 {
11207 /* 1) Handle misc store, immediate offset. */
11208 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
11209 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
11210 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11211 regcache_raw_read_unsigned (reg_cache, reg_src1,
11212 &u_regval[0]);
11213 if (ARM_PC_REGNUM == reg_src1)
11214 {
11215 /* If R15 was used as Rn, its value is the current PC+8. */
11216 u_regval[0] = u_regval[0] + 8;
11217 }
11218 offset_8 = (immed_high << 4) | immed_low;
11219 /* Calculate target store address. */
11220 if (14 == arm_insn_r->opcode)
11221 {
11222 tgt_mem_addr = u_regval[0] + offset_8;
11223 }
11224 else
11225 {
11226 tgt_mem_addr = u_regval[0] - offset_8;
11227 }
11228 if (ARM_RECORD_STRH == str_type)
11229 {
11230 record_buf_mem[0] = 2;
11231 record_buf_mem[1] = tgt_mem_addr;
11232 arm_insn_r->mem_rec_count = 1;
11233 }
11234 else if (ARM_RECORD_STRD == str_type)
11235 {
11236 record_buf_mem[0] = 4;
11237 record_buf_mem[1] = tgt_mem_addr;
11238 record_buf_mem[2] = 4;
11239 record_buf_mem[3] = tgt_mem_addr + 4;
11240 arm_insn_r->mem_rec_count = 2;
11241 }
11242 }
11243 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
11244 {
11245 /* 2) Store, register offset. */
11246 /* Get Rm. */
11247 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11248 /* Get Rn. */
11249 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11250 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11251 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11252 if (15 == reg_src2)
11253 {
11254 /* If R15 was used as Rn, its value is the current PC+8. */
11255 u_regval[0] = u_regval[0] + 8;
11256 }
11257 /* Calculate target store address, Rn +/- Rm, register offset. */
11258 if (12 == arm_insn_r->opcode)
11259 {
11260 tgt_mem_addr = u_regval[0] + u_regval[1];
11261 }
11262 else
11263 {
11264 tgt_mem_addr = u_regval[1] - u_regval[0];
11265 }
11266 if (ARM_RECORD_STRH == str_type)
11267 {
11268 record_buf_mem[0] = 2;
11269 record_buf_mem[1] = tgt_mem_addr;
11270 arm_insn_r->mem_rec_count = 1;
11271 }
11272 else if (ARM_RECORD_STRD == str_type)
11273 {
11274 record_buf_mem[0] = 4;
11275 record_buf_mem[1] = tgt_mem_addr;
11276 record_buf_mem[2] = 4;
11277 record_buf_mem[3] = tgt_mem_addr + 4;
11278 arm_insn_r->mem_rec_count = 2;
11279 }
11280 }
11281 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11282 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
11283 {
11284 /* 3) Store, immediate pre-indexed. */
11285 /* 5) Store, immediate post-indexed. */
11286 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
11287 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
11288 offset_8 = (immed_high << 4) | immed_low;
11289 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11290 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11291 /* Calculate target store address, Rn +/- imm8, immediate offset. */
11292 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
11293 {
11294 tgt_mem_addr = u_regval[0] + offset_8;
11295 }
11296 else
11297 {
11298 tgt_mem_addr = u_regval[0] - offset_8;
11299 }
11300 if (ARM_RECORD_STRH == str_type)
11301 {
11302 record_buf_mem[0] = 2;
11303 record_buf_mem[1] = tgt_mem_addr;
11304 arm_insn_r->mem_rec_count = 1;
11305 }
11306 else if (ARM_RECORD_STRD == str_type)
11307 {
11308 record_buf_mem[0] = 4;
11309 record_buf_mem[1] = tgt_mem_addr;
11310 record_buf_mem[2] = 4;
11311 record_buf_mem[3] = tgt_mem_addr + 4;
11312 arm_insn_r->mem_rec_count = 2;
11313 }
11314 /* Record Rn also as it changes. */
11315 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
11316 arm_insn_r->reg_rec_count = 1;
11317 }
11318 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
11319 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
11320 {
11321 /* 4) Store, register pre-indexed. */
11322 /* 6) Store, register post-indexed. */
11323 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11324 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11325 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11326 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11327 /* Calculate target store address, Rn +/- Rm, register offset. */
11328 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
11329 {
11330 tgt_mem_addr = u_regval[0] + u_regval[1];
11331 }
11332 else
11333 {
11334 tgt_mem_addr = u_regval[1] - u_regval[0];
11335 }
11336 if (ARM_RECORD_STRH == str_type)
11337 {
11338 record_buf_mem[0] = 2;
11339 record_buf_mem[1] = tgt_mem_addr;
11340 arm_insn_r->mem_rec_count = 1;
11341 }
11342 else if (ARM_RECORD_STRD == str_type)
11343 {
11344 record_buf_mem[0] = 4;
11345 record_buf_mem[1] = tgt_mem_addr;
11346 record_buf_mem[2] = 4;
11347 record_buf_mem[3] = tgt_mem_addr + 4;
11348 arm_insn_r->mem_rec_count = 2;
11349 }
11350 /* Record Rn also as it changes. */
11351 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
11352 arm_insn_r->reg_rec_count = 1;
11353 }
11354 return 0;
11355}
11356
11357/* Handling ARM extension space insns. */
11358
11359static int
11360arm_record_extension_space (arm_insn_decode_record *arm_insn_r)
11361{
11362 int ret = 0; /* Return value: -1:record failure ; 0:success */
11363 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
11364 uint32_t record_buf[8], record_buf_mem[8];
11365 uint32_t reg_src1 = 0;
11366 struct regcache *reg_cache = arm_insn_r->regcache;
11367 ULONGEST u_regval = 0;
11368
11369 gdb_assert (!INSN_RECORDED(arm_insn_r));
11370 /* Handle unconditional insn extension space. */
11371
11372 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
11373 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11374 if (arm_insn_r->cond)
11375 {
11376 /* PLD has no effect on the architectural state, it just affects
11377 the caches. */
11378 if (5 == ((opcode1 & 0xE0) >> 5))
11379 {
11380 /* BLX(1) */
11381 record_buf[0] = ARM_PS_REGNUM;
11382 record_buf[1] = ARM_LR_REGNUM;
11383 arm_insn_r->reg_rec_count = 2;
11384 }
11385 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
11386 }
11387
11388
11389 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11390 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
11391 {
11392 ret = -1;
11393 /* Undefined instruction on ARM V5; need to handle if later
11394 versions define it. */
11395 }
11396
11397 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
11398 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11399 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
11400
11401 /* Handle arithmetic insn extension space. */
11402 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
11403 && !INSN_RECORDED(arm_insn_r))
11404 {
11405 /* Handle MLA(S) and MUL(S). */
11406 if (in_inclusive_range (insn_op1, 0U, 3U))
11407 {
11408 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11409 record_buf[1] = ARM_PS_REGNUM;
11410 arm_insn_r->reg_rec_count = 2;
11411 }
11412 else if (in_inclusive_range (insn_op1, 4U, 15U))
11413 {
11414 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11415 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11416 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11417 record_buf[2] = ARM_PS_REGNUM;
11418 arm_insn_r->reg_rec_count = 3;
11419 }
11420 }
11421
11422 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11423 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11424 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11425
11426 /* Handle control insn extension space. */
11427
11428 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11429 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11430 {
11431 if (!bit (arm_insn_r->arm_insn,25))
11432 {
11433 if (!bits (arm_insn_r->arm_insn, 4, 7))
11434 {
11435 if ((0 == insn_op1) || (2 == insn_op1))
11436 {
11437 /* MRS. */
11438 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11439 arm_insn_r->reg_rec_count = 1;
11440 }
11441 else if (1 == insn_op1)
11442 {
11443 /* CPSR is going to be changed. */
11444 record_buf[0] = ARM_PS_REGNUM;
11445 arm_insn_r->reg_rec_count = 1;
11446 }
11447 else if (3 == insn_op1)
11448 {
11449 /* SPSR is going to be changed. */
11450 /* We need to get SPSR value, which is yet to be done. */
11451 return -1;
11452 }
11453 }
11454 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11455 {
11456 if (1 == insn_op1)
11457 {
11458 /* BX. */
11459 record_buf[0] = ARM_PS_REGNUM;
11460 arm_insn_r->reg_rec_count = 1;
11461 }
11462 else if (3 == insn_op1)
11463 {
11464 /* CLZ. */
11465 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11466 arm_insn_r->reg_rec_count = 1;
11467 }
11468 }
11469 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11470 {
11471 /* BLX. */
11472 record_buf[0] = ARM_PS_REGNUM;
11473 record_buf[1] = ARM_LR_REGNUM;
11474 arm_insn_r->reg_rec_count = 2;
11475 }
11476 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11477 {
11478 /* QADD, QSUB, QDADD, QDSUB */
11479 record_buf[0] = ARM_PS_REGNUM;
11480 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11481 arm_insn_r->reg_rec_count = 2;
11482 }
11483 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11484 {
11485 /* BKPT. */
11486 record_buf[0] = ARM_PS_REGNUM;
11487 record_buf[1] = ARM_LR_REGNUM;
11488 arm_insn_r->reg_rec_count = 2;
11489
11490 /* Save SPSR also; how? */
11491 return -1;
11492 }
11493 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
11494 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11495 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11496 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11497 )
11498 {
11499 if (0 == insn_op1 || 1 == insn_op1)
11500 {
11501 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11502 /* We don't optimize for SMULW<y>, where only Rd
11503 would be needed. */
11504 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11505 record_buf[1] = ARM_PS_REGNUM;
11506 arm_insn_r->reg_rec_count = 2;
11507 }
11508 else if (2 == insn_op1)
11509 {
11510 /* SMLAL<x><y>. */
11511 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11512 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11513 arm_insn_r->reg_rec_count = 2;
11514 }
11515 else if (3 == insn_op1)
11516 {
11517 /* SMUL<x><y>. */
11518 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11519 arm_insn_r->reg_rec_count = 1;
11520 }
11521 }
11522 }
11523 else
11524 {
11525 /* MSR : immediate form. */
11526 if (1 == insn_op1)
11527 {
11528 /* CPSR is going to be changed. */
11529 record_buf[0] = ARM_PS_REGNUM;
11530 arm_insn_r->reg_rec_count = 1;
11531 }
11532 else if (3 == insn_op1)
11533 {
11534 /* SPSR is going to be changed. */
11535 /* We need to get the SPSR value, which is yet to be done. */
11536 return -1;
11537 }
11538 }
11539 }
11540
11541 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11542 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11543 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11544
11545 /* Handle load/store insn extension space. */
11546
11547 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11548 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11549 && !INSN_RECORDED(arm_insn_r))
11550 {
11551 /* SWP/SWPB. */
11552 if (0 == insn_op1)
11553 {
11554 /* These insns change both a register and memory. */
11555 /* SWP or SWPB insn. */
11556 /* Get memory address given by Rn. */
11557 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11558 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11559 /* SWP insn swaps a word. */
11560 if (8 == arm_insn_r->opcode)
11561 {
11562 record_buf_mem[0] = 4;
11563 }
11564 else
11565 {
11566 /* SWPB insn swaps only a byte. */
11567 record_buf_mem[0] = 1;
11568 }
11569 record_buf_mem[1] = u_regval;
11570 arm_insn_r->mem_rec_count = 1;
11571 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11572 arm_insn_r->reg_rec_count = 1;
11573 }
11574 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11575 {
11576 /* STRH. */
11577 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11578 ARM_RECORD_STRH);
11579 }
11580 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11581 {
11582 /* LDRD. */
11583 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11584 record_buf[1] = record_buf[0] + 1;
11585 arm_insn_r->reg_rec_count = 2;
11586 }
11587 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11588 {
11589 /* STRD. */
11590 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11591 ARM_RECORD_STRD);
11592 }
11593 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11594 {
11595 /* LDRH, LDRSB, LDRSH. */
11596 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11597 arm_insn_r->reg_rec_count = 1;
11598 }
11599
11600 }
11601
11602 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11603 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11604 && !INSN_RECORDED(arm_insn_r))
11605 {
11606 ret = -1;
11607 /* Handle coprocessor insn extension space. */
11608 }
11609
11610 /* To be done for ARMv5 and later; as of now we return -1. */
11611 if (-1 == ret)
11612 return ret;
11613
11614 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11615 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11616
11617 return ret;
11618}
11619
11620/* Handling opcode 000 insns. */
11621
11622static int
11623arm_record_data_proc_misc_ld_str (arm_insn_decode_record *arm_insn_r)
11624{
11625 struct regcache *reg_cache = arm_insn_r->regcache;
11626 uint32_t record_buf[8], record_buf_mem[8];
11627 ULONGEST u_regval[2] = {0};
11628
11629 uint32_t reg_src1 = 0;
11630 uint32_t opcode1 = 0;
11631
11632 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11633 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11634 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11635
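 /* OPCODE1 holds insn bits 20-24. (OPCODE1 & 0x19) == 0x10 selects the
 "miscellaneous instructions" encodings (op of the form 10xx0 in bits
 24-20); anything else in this opcode space is an ordinary
 data-processing insn. */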
11636 if (!((opcode1 & 0x19) == 0x10))
11637 {
11638 /* Data-processing (register) and data-processing (register-shifted
11639 register). */
11640 /* In every shifter-operand mode the insn modifies the destination
11641 register (bits 12-15) and the flags in CPSR. */
11642 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11643 record_buf[1] = ARM_PS_REGNUM;
11644 arm_insn_r->reg_rec_count = 2;
11645 }
11646 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
11647 {
11648 /* Miscellaneous instructions */
11649
11650 if (3 == arm_insn_r->decode && 0x12 == opcode1
11651 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11652 {
11653 /* Handle BLX, branch and link/exchange. */
11654 if (9 == arm_insn_r->opcode)
11655 {
11656 /* The target instruction set is chosen from bit[0] of Rm, which is
11657 copied to the T bit of CPSR; R14 stores the return address. */
11658 record_buf[0] = ARM_PS_REGNUM;
11659 record_buf[1] = ARM_LR_REGNUM;
11660 arm_insn_r->reg_rec_count = 2;
11661 }
11662 }
11663 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11664 {
11665 /* Handle enhanced software breakpoint insn, BKPT. */
11666 /* CPSR is changed so that execution continues in ARM state with
11667 normal interrupts disabled, entering abort mode. */
11668 /* PC is set according to the high-vector configuration. */
11669 /* If the user hits the breakpoint and then reverses, we need
11670 to go back with the previous CPSR and
11671 program counter. */
11672 record_buf[0] = ARM_PS_REGNUM;
11673 record_buf[1] = ARM_LR_REGNUM;
11674 arm_insn_r->reg_rec_count = 2;
11675
11676 /* Save SPSR also; how? */
11677 return -1;
11678 }
11679 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11680 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11681 {
11682 /* Handle BX, branch and link/exchange. */
11683 /* The target instruction set is chosen from bit[0] of Rm, copied to the T bit of CPSR. */
11684 record_buf[0] = ARM_PS_REGNUM;
11685 arm_insn_r->reg_rec_count = 1;
11686 }
11687 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11688 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11689 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11690 {
11691 /* Count leading zeros: CLZ. */
11692 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11693 arm_insn_r->reg_rec_count = 1;
11694 }
11695 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11696 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11697 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11698 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
11699 {
11700 /* Handle MRS insn. */
11701 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11702 arm_insn_r->reg_rec_count = 1;
11703 }
11704 }
11705 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
11706 {
11707 /* Multiply and multiply-accumulate */
11708
11709 /* Handle multiply instructions. */
11710 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11711 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11712 {
11713 /* Handle MLA and MUL. */
11714 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11715 record_buf[1] = ARM_PS_REGNUM;
11716 arm_insn_r->reg_rec_count = 2;
11717 }
11718 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11719 {
11720 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11721 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11722 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11723 record_buf[2] = ARM_PS_REGNUM;
11724 arm_insn_r->reg_rec_count = 3;
11725 }
11726 }
11727 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
11728 {
11729 /* Synchronization primitives */
11730
11731 /* Handling SWP, SWPB. */
11732 /* These insns change both a register and memory. */
11733 /* SWP or SWPB insn. */
11734
11735 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11736 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11737 /* SWP insn swaps a word. */
11738 if (8 == arm_insn_r->opcode)
11739 {
11740 record_buf_mem[0] = 4;
11741 }
11742 else
11743 {
11744 /* SWPB insn swaps only a byte. */
11745 record_buf_mem[0] = 1;
11746 }
11747 record_buf_mem[1] = u_regval[0];
11748 arm_insn_r->mem_rec_count = 1;
11749 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11750 arm_insn_r->reg_rec_count = 1;
11751 }
11752 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
11753 || 15 == arm_insn_r->decode)
11754 {
11755 if ((opcode1 & 0x12) == 2)
11756 {
11757 /* Extra load/store (unprivileged) */
11758 return -1;
11759 }
11760 else
11761 {
11762 /* Extra load/store */
11763 switch (bits (arm_insn_r->arm_insn, 5, 6))
11764 {
11765 case 1:
11766 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
11767 {
11768 /* STRH (register), STRH (immediate) */
11769 arm_record_strx (arm_insn_r, &record_buf[0],
11770 &record_buf_mem[0], ARM_RECORD_STRH);
11771 }
11772 else if ((opcode1 & 0x05) == 0x1)
11773 {
11774 /* LDRH (register) */
11775 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11776 arm_insn_r->reg_rec_count = 1;
11777
11778 if (bit (arm_insn_r->arm_insn, 21))
11779 {
11780 /* Write back to Rn. */
11781 record_buf[arm_insn_r->reg_rec_count++]
11782 = bits (arm_insn_r->arm_insn, 16, 19);
11783 }
11784 }
11785 else if ((opcode1 & 0x05) == 0x5)
11786 {
11787 /* LDRH (immediate), LDRH (literal) */
11788 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11789
11790 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11791 arm_insn_r->reg_rec_count = 1;
11792
11793 if (rn != 15)
11794 {
11795 /* LDRH (immediate). */
11796 if (bit (arm_insn_r->arm_insn, 21))
11797 {
11798 /* Write back to Rn. */
11799 record_buf[arm_insn_r->reg_rec_count++] = rn;
11800 }
11801 }
11802 }
11803 else
11804 return -1;
11805 break;
11806 case 2:
11807 if ((opcode1 & 0x05) == 0x0)
11808 {
11809 /* LDRD (register) */
11810 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11811 record_buf[1] = record_buf[0] + 1;
11812 arm_insn_r->reg_rec_count = 2;
11813
11814 if (bit (arm_insn_r->arm_insn, 21))
11815 {
11816 /* Write back to Rn. */
11817 record_buf[arm_insn_r->reg_rec_count++]
11818 = bits (arm_insn_r->arm_insn, 16, 19);
11819 }
11820 }
11821 else if ((opcode1 & 0x05) == 0x1)
11822 {
11823 /* LDRSB (register) */
11824 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11825 arm_insn_r->reg_rec_count = 1;
11826
11827 if (bit (arm_insn_r->arm_insn, 21))
11828 {
11829 /* Write back to Rn. */
11830 record_buf[arm_insn_r->reg_rec_count++]
11831 = bits (arm_insn_r->arm_insn, 16, 19);
11832 }
11833 }
11834 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
11835 {
11836 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
11837 LDRSB (literal) */
11838 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11839
11840 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11841 arm_insn_r->reg_rec_count = 1;
11842
11843 if (rn != 15)
11844 {
11845 /* LDRD (immediate), LDRSB (immediate). */
11846 if (bit (arm_insn_r->arm_insn, 21))
11847 {
11848 /* Write back to Rn. */
11849 record_buf[arm_insn_r->reg_rec_count++] = rn;
11850 }
11851 }
11852 }
11853 else
11854 return -1;
11855 break;
11856 case 3:
11857 if ((opcode1 & 0x05) == 0x0)
11858 {
11859 /* STRD (register) */
11860 arm_record_strx (arm_insn_r, &record_buf[0],
11861 &record_buf_mem[0], ARM_RECORD_STRD);
11862 }
11863 else if ((opcode1 & 0x05) == 0x1)
11864 {
11865 /* LDRSH (register) */
11866 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11867 arm_insn_r->reg_rec_count = 1;
11868
11869 if (bit (arm_insn_r->arm_insn, 21))
11870 {
11871 /* Write back to Rn. */
11872 record_buf[arm_insn_r->reg_rec_count++]
11873 = bits (arm_insn_r->arm_insn, 16, 19);
11874 }
11875 }
11876 else if ((opcode1 & 0x05) == 0x4)
11877 {
11878 /* STRD (immediate) */
11879 arm_record_strx (arm_insn_r, &record_buf[0],
11880 &record_buf_mem[0], ARM_RECORD_STRD);
11881 }
11882 else if ((opcode1 & 0x05) == 0x5)
11883 {
11884 /* LDRSH (immediate), LDRSH (literal) */
11885 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11886 arm_insn_r->reg_rec_count = 1;
11887
11888 if (bit (arm_insn_r->arm_insn, 21))
11889 {
11890 /* Write back to Rn. */
11891 record_buf[arm_insn_r->reg_rec_count++]
11892 = bits (arm_insn_r->arm_insn, 16, 19);
11893 }
11894 }
11895 else
11896 return -1;
11897 break;
11898 default:
11899 return -1;
11900 }
11901 }
11902 }
11903 else
11904 {
11905 return -1;
11906 }
11907
11908 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11909 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11910 return 0;
11911}
11912
11913/* Handling opcode 001 insns. */
11914
11915static int
11916arm_record_data_proc_imm (arm_insn_decode_record *arm_insn_r)
11917{
11918 uint32_t record_buf[8], record_buf_mem[8];
11919
11920 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11921 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11922
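 /* Opcode values 9 and 11 (bits 24-21 == 10R1, with R = bit 22) together
 with bit 20 clear are the MSR (immediate) forms; bit 22 selects SPSR
 (opcode 11) rather than CPSR (opcode 9) as the destination. */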
11923 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11924 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11925 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11926 )
11927 {
11928 /* Handle MSR insn. */
11929 if (9 == arm_insn_r->opcode)
11930 {
11931 /* CPSR is going to be changed. */
11932 record_buf[0] = ARM_PS_REGNUM;
11933 arm_insn_r->reg_rec_count = 1;
11934 }
11935 else
11936 {
11937 /* SPSR is going to be changed. */
11938 }
11939 }
11940 else if (arm_insn_r->opcode <= 15)
11941 {
11942 /* Normal data processing insns. */
11943 /* In every shifter-operand mode the insn modifies the destination
11944 register (bits 12-15) and the flags in CPSR. */
11945 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11946 record_buf[1] = ARM_PS_REGNUM;
11947 arm_insn_r->reg_rec_count = 2;
11948 }
11949 else
11950 {
11951 return -1;
11952 }
11953
11954 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11955 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11956 return 0;
11957}
11958
11959static int
11960arm_record_media (arm_insn_decode_record *arm_insn_r)
11961{
11962 uint32_t record_buf[8];
11963
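 /* Each case below appends the registers the insn modifies;
 REG_REC_COUNT is relied on to be zero on entry (presumably cleared
 by the caller before decoding). */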
11964 switch (bits (arm_insn_r->arm_insn, 22, 24))
11965 {
11966 case 0:
11967 /* Parallel addition and subtraction, signed */
11968 case 1:
11969 /* Parallel addition and subtraction, unsigned */
11970 case 2:
11971 case 3:
11972 /* Packing, unpacking, saturation and reversal */
11973 {
11974 int rd = bits (arm_insn_r->arm_insn, 12, 15);
11975
11976 record_buf[arm_insn_r->reg_rec_count++] = rd;
11977 }
11978 break;
11979
11980 case 4:
11981 case 5:
11982 /* Signed multiplies */
11983 {
11984 int rd = bits (arm_insn_r->arm_insn, 16, 19);
11985 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
11986
11987 record_buf[arm_insn_r->reg_rec_count++] = rd;
11988 if (op1 == 0x0)
11989 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11990 else if (op1 == 0x4)
11991 record_buf[arm_insn_r->reg_rec_count++]
11992 = bits (arm_insn_r->arm_insn, 12, 15);
11993 }
11994 break;
11995
11996 case 6:
11997 {
11998 if (bit (arm_insn_r->arm_insn, 21)
11999 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
12000 {
12001 /* SBFX */
12002 record_buf[arm_insn_r->reg_rec_count++]
12003 = bits (arm_insn_r->arm_insn, 12, 15);
12004 }
12005 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
12006 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
12007 {
12008 /* USAD8 and USADA8 */
12009 record_buf[arm_insn_r->reg_rec_count++]
12010 = bits (arm_insn_r->arm_insn, 16, 19);
12011 }
12012 }
12013 break;
12014
12015 case 7:
12016 {
12017 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
12018 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
12019 {
12020 /* Permanently UNDEFINED */
12021 return -1;
12022 }
12023 else
12024 {
12025 /* BFC, BFI and UBFX */
12026 record_buf[arm_insn_r->reg_rec_count++]
12027 = bits (arm_insn_r->arm_insn, 12, 15);
12028 }
12029 }
12030 break;
12031
12032 default:
12033 return -1;
12034 }
12035
12036 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12037
12038 return 0;
12039}
12040
12041/* Handle ARM mode instructions with opcode 010. */
12042
12043static int
12044arm_record_ld_st_imm_offset (arm_insn_decode_record *arm_insn_r)
12045{
12046 struct regcache *reg_cache = arm_insn_r->regcache;
12047
12048 uint32_t reg_base, reg_dest;
12049 uint32_t offset_12, tgt_mem_addr;
12050 uint32_t record_buf[8], record_buf_mem[8];
12051 unsigned char wback;
12052 ULONGEST u_regval;
12053
12054 /* Calculate wback. */
12055 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
12056 || (bit (arm_insn_r->arm_insn, 21) == 1);
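 /* I.e. the base register is written back either in post-indexed
 addressing (P bit, insn bit 24, clear) or when the W bit (insn bit 21)
 requests writeback. */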
12057
12058 arm_insn_r->reg_rec_count = 0;
12059 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
12060
12061 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12062 {
12063 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
12064 and LDRT. */
12065
12066 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
12067 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
12068
12069 /* The LDR instruction is capable of branching. If a MOV LR, PC
12070 precedes an LDR instruction that loads into R15, the pair
12071 emulates a branch-and-link instruction, and hence we need to save
12072 CPSR and PC as well. */
12073 if (ARM_PC_REGNUM == reg_dest)
12074 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12075
12076 /* If wback is true, also save the base register, which is going to be
12077 written to. */
12078 if (wback)
12079 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12080 }
12081 else
12082 {
12083 /* STR (immediate), STRB (immediate), STRBT and STRT. */
12084
12085 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
12086 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
12087
12088 /* Handle bit U. */
12089 if (bit (arm_insn_r->arm_insn, 23))
12090 {
12091 /* U == 1: Add the offset. */
12092 tgt_mem_addr = (uint32_t) u_regval + offset_12;
12093 }
12094 else
12095 {
12096 /* U == 0: subtract the offset. */
12097 tgt_mem_addr = (uint32_t) u_regval - offset_12;
12098 }
12099
12100 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
12101 bytes. */
12102 if (bit (arm_insn_r->arm_insn, 22))
12103 {
12104 /* STRB and STRBT: 1 byte. */
12105 record_buf_mem[0] = 1;
12106 }
12107 else
12108 {
12109 /* STR and STRT: 4 bytes. */
12110 record_buf_mem[0] = 4;
12111 }
12112
12113 /* Handle bit P. */
12114 if (bit (arm_insn_r->arm_insn, 24))
12115 record_buf_mem[1] = tgt_mem_addr;
12116 else
12117 record_buf_mem[1] = (uint32_t) u_regval;
12118
12119 arm_insn_r->mem_rec_count = 1;
12120
12121 /* If wback is true, also save the base register, which is going to be
12122 written to. */
12123 if (wback)
12124 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12125 }
12126
12127 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12128 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12129 return 0;
12130}
12131
12132/* Handling opcode 011 insns. */
12133
12134static int
12135arm_record_ld_st_reg_offset (arm_insn_decode_record *arm_insn_r)
12136{
12137 struct regcache *reg_cache = arm_insn_r->regcache;
12138
12139 uint32_t shift_imm = 0;
12140 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
12141 uint32_t offset_12 = 0, tgt_mem_addr = 0;
12142 uint32_t record_buf[8], record_buf_mem[8];
12143
12144 LONGEST s_word;
12145 ULONGEST u_regval[2];
12146
12147 if (bit (arm_insn_r->arm_insn, 4))
12148 return arm_record_media (arm_insn_r);
12149
12150 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
12151 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
12152
12153 /* Handle enhanced store insns and the LDRD DSP insn; the ordering
12154 follows the addressing modes used by the store insns
12155 (e.g. STRH). */
12156
12157 /* LDR or STR? */
12158 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12159 {
12160 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
12161 /* The LDR insn is capable of branching: if a MOV LR, PC
12162 precedes an LDR insn that loads into R15,
12163 the pair emulates a branch-and-link insn, and hence we
12164 need to save CPSR and PC as well. */
12165 if (15 != reg_dest)
12166 {
12167 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12168 arm_insn_r->reg_rec_count = 1;
12169 }
12170 else
12171 {
12172 record_buf[0] = reg_dest;
12173 record_buf[1] = ARM_PS_REGNUM;
12174 arm_insn_r->reg_rec_count = 2;
12175 }
12176 }
12177 else
12178 {
12179 if (! bits (arm_insn_r->arm_insn, 4, 11))
12180 {
12181 /* Store insn, register offset and register pre-indexed,
12182 register post-indexed. */
12183 /* Get Rm. */
12184 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
12185 /* Get Rn. */
12186 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
12187 regcache_raw_read_unsigned (reg_cache, reg_src1
12188 , &u_regval[0]);
12189 regcache_raw_read_unsigned (reg_cache, reg_src2
12190 , &u_regval[1]);
12191 if (15 == reg_src2)
12192 {
12193 /* If R15 was used as Rn, its value is the current PC+8. */
12194 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
12195 u_regval[0] = u_regval[0] + 8;
12196 }
12197 /* Calculate target store address, Rn +/- Rm, register offset. */
12198 /* U == 1. */
12199 if (bit (arm_insn_r->arm_insn, 23))
12200 {
12201 tgt_mem_addr = u_regval[0] + u_regval[1];
12202 }
12203 else
12204 {
12205 tgt_mem_addr = u_regval[1] - u_regval[0];
12206 }
12207
12208 switch (arm_insn_r->opcode)
12209 {
12210 /* STR. */
12211 case 8:
12212 case 12:
12213 /* STR. */
12214 case 9:
12215 case 13:
12216 /* STRT. */
12217 case 1:
12218 case 5:
12219 /* STR. */
12220 case 0:
12221 case 4:
12222 record_buf_mem[0] = 4;
12223 break;
12224
12225 /* STRB. */
12226 case 10:
12227 case 14:
12228 /* STRB. */
12229 case 11:
12230 case 15:
12231 /* STRBT. */
12232 case 3:
12233 case 7:
12234 /* STRB. */
12235 case 2:
12236 case 6:
12237 record_buf_mem[0] = 1;
12238 break;
12239
12240 default:
12241 gdb_assert_not_reached ("no decoding pattern found");
12242 break;
12243 }
12244 record_buf_mem[1] = tgt_mem_addr;
12245 arm_insn_r->mem_rec_count = 1;
12246
12247 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
12248 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
12249 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
12250 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
12251 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
12252 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
12253 )
12254 {
12255 /* Rn is going to be changed in pre-indexed mode and
12256 post-indexed mode as well. */
12257 record_buf[0] = reg_src2;
12258 arm_insn_r->reg_rec_count = 1;
12259 }
12260 }
12261 else
12262 {
12263 /* Store insn, scaled register offset; scaled pre-indexed. */
12264 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
12265 /* Get Rm. */
12266 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
12267 /* Get Rn. */
12268 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
12269 /* Get shift_imm. */
12270 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
12271 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12272 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
12273 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12274 /* OFFSET_12 at this point holds the shift type from bits 5-6. */
12275 switch (offset_12)
12276 {
12277 case 0:
12278 /* LSL: logical shift left. */
12279 offset_12 = u_regval[0] << shift_imm;
12280 break;
12281
12282 case 1:
12283 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
12284 break;
12285
12286 case 2:
12287 if (!shift_imm)
12288 {
12289 if (bit (u_regval[0], 31))
12290 {
12291 offset_12 = 0xFFFFFFFF;
12292 }
12293 else
12294 {
12295 offset_12 = 0;
12296 }
12297 }
12298 else
12299 {
12300 /* ASR: arithmetic shift right. */
12301 offset_12 = s_word >> shift_imm;
12302 }
12303 break;
12304
12305 case 3:
12306 if (!shift_imm)
12307 {
12308 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
12309 &u_regval[1]);
12310 /* Get C flag value and shift it by 31. */
12311 offset_12 = (((bit (u_regval[1], 29)) << 31) \
12312 | (u_regval[0]) >> 1);
12313 }
12314 else
12315 {
12316 offset_12 = (u_regval[0] >> shift_imm)
12317 | (u_regval[0] <<
12318 (32 - shift_imm));
12319 }
12320 break;
12321
12322 default:
12323 gdb_assert_not_reached ("no decoding pattern found");
12324 break;
12325 }
12326
12327 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12328 /* bit U set. */
12329 if (bit (arm_insn_r->arm_insn, 23))
12330 {
12331 tgt_mem_addr = u_regval[1] + offset_12;
12332 }
12333 else
12334 {
12335 tgt_mem_addr = u_regval[1] - offset_12;
12336 }
12337
12338 switch (arm_insn_r->opcode)
12339 {
12340 /* STR. */
12341 case 8:
12342 case 12:
12343 /* STR. */
12344 case 9:
12345 case 13:
12346 /* STRT. */
12347 case 1:
12348 case 5:
12349 /* STR. */
12350 case 0:
12351 case 4:
12352 record_buf_mem[0] = 4;
12353 break;
12354
12355 /* STRB. */
12356 case 10:
12357 case 14:
12358 /* STRB. */
12359 case 11:
12360 case 15:
12361 /* STRBT. */
12362 case 3:
12363 case 7:
12364 /* STRB. */
12365 case 2:
12366 case 6:
12367 record_buf_mem[0] = 1;
12368 break;
12369
12370 default:
12371 gdb_assert_not_reached ("no decoding pattern found");
12372 break;
12373 }
12374 record_buf_mem[1] = tgt_mem_addr;
12375 arm_insn_r->mem_rec_count = 1;
12376
12377 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
12378 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
12379 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
12380 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
12381 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
12382 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
12383 )
12384 {
12385 /* Rn is going to be changed in register scaled pre-indexed
12386 mode, and scaled post-indexed mode. */
12387 record_buf[0] = reg_src2;
12388 arm_insn_r->reg_rec_count = 1;
12389 }
12390 }
12391 }
12392
12393 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12394 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12395 return 0;
12396}
12397
12398/* Handle ARM mode instructions with opcode 100. */
12399
12400static int
12401arm_record_ld_st_multiple (arm_insn_decode_record *arm_insn_r)
12402{
12403 struct regcache *reg_cache = arm_insn_r->regcache;
12404 uint32_t register_count = 0, register_bits;
12405 uint32_t reg_base, addr_mode;
12406 uint32_t record_buf[24], record_buf_mem[48];
12407 uint32_t wback;
12408 ULONGEST u_regval;
12409
12410 /* Fetch the list of registers. */
12411 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
12412 arm_insn_r->reg_rec_count = 0;
12413
12414 /* Fetch the base register that contains the address we are loading data
12415 to. */
12416 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
12417
12418 /* Calculate wback. */
12419 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
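 /* For LDM/STM the base register writeback is controlled solely by
 the W bit (insn bit 21). */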
12420
12421 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12422 {
12423 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
12424
12425 /* Find out which registers are going to be loaded from memory. */
12426 while (register_bits)
12427 {
12428 if (register_bits & 0x00000001)
12429 record_buf[arm_insn_r->reg_rec_count++] = register_count;
12430 register_bits = register_bits >> 1;
12431 register_count++;
12432 }
12433
12434
12435 /* If wback is true, also save the base register, which is going to be
12436 written to. */
12437 if (wback)
12438 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12439
12440 /* Save the CPSR register. */
12441 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12442 }
12443 else
12444 {
12445 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
12446
12447 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
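 /* ADDR_MODE packs the U (bit 23) and P (bit 24) flags, giving the
 four store addressing modes handled in the switch below. */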
12448
12449 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
12450
12451 /* Find out how many registers are going to be stored to memory. */
12452 while (register_bits)
12453 {
12454 if (register_bits & 0x00000001)
12455 register_count++;
12456 register_bits = register_bits >> 1;
12457 }
12458
12459 switch (addr_mode)
12460 {
12461 /* STMDA (STMED): Decrement after. */
12462 case 0:
12463 record_buf_mem[1] = (uint32_t) u_regval
12464 - register_count * ARM_INT_REGISTER_SIZE + 4;
12465 break;
12466 /* STM (STMIA, STMEA): Increment after. */
12467 case 1:
12468 record_buf_mem[1] = (uint32_t) u_regval;
12469 break;
12470 /* STMDB (STMFD): Decrement before. */
12471 case 2:
12472 record_buf_mem[1] = (uint32_t) u_regval
12473 - register_count * ARM_INT_REGISTER_SIZE;
12474 break;
12475 /* STMIB (STMFA): Increment before. */
12476 case 3:
12477 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
12478 break;
12479 default:
12480 gdb_assert_not_reached ("no decoding pattern found");
12481 break;
12482 }
12483
12484 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
12485 arm_insn_r->mem_rec_count = 1;
12486
12487 /* If wback is true, also save the base register, which is going to be
12488 written to. */
12489 if (wback)
12490 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12491 }
12492
12493 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12494 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12495 return 0;
12496}
12497
12498/* Handling opcode 101 insns. */
12499
12500static int
12501arm_record_b_bl (arm_insn_decode_record *arm_insn_r)
12502{
12503 uint32_t record_buf[8];
12504
12505 /* Handle B, BL, BLX(1) insns. */
12506 /* B simply branches so we do nothing here. */
12507  /* Note: BLX(1) doesn't fall here; instead it falls into the
12508     extension space.  */
12509 if (bit (arm_insn_r->arm_insn, 24))
12510 {
12511 record_buf[0] = ARM_LR_REGNUM;
12512 arm_insn_r->reg_rec_count = 1;
12513 }
12514
12515 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12516
12517 return 0;
12518}
12519
12520static int
12521arm_record_unsupported_insn (arm_insn_decode_record *arm_insn_r)
12522{
12523 gdb_printf (gdb_stderr,
12524 _("Process record does not support instruction "
12525	      "0x%0x at address %s.\n"), arm_insn_r->arm_insn,
12526 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
12527
12528 return -1;
12529}
12530
12531/* Record handler for vector data transfer instructions. */
12532
12533static int
12534arm_record_vdata_transfer_insn (arm_insn_decode_record *arm_insn_r)
12535{
12536 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
12537 uint32_t record_buf[4];
12538
12539 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
12540 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
12541 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
12542 bit_l = bit (arm_insn_r->arm_insn, 20);
12543 bit_c = bit (arm_insn_r->arm_insn, 8);
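  /* Bit L (20) is set when the transfer is into an ARM core register; bit C
     (8) distinguishes the forms that touch D-register elements (VMOV scalar,
     VDUP) from the S-register/system-register forms.  The cases below
     record accordingly.  */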
12544
12545 /* Handle VMOV instruction. */
12546 if (bit_l && bit_c)
12547 {
12548 record_buf[0] = reg_t;
12549 arm_insn_r->reg_rec_count = 1;
12550 }
12551 else if (bit_l && !bit_c)
12552 {
12553 /* Handle VMOV instruction. */
12554 if (bits_a == 0x00)
12555 {
12556 record_buf[0] = reg_t;
12557 arm_insn_r->reg_rec_count = 1;
12558 }
12559 /* Handle VMRS instruction. */
12560 else if (bits_a == 0x07)
12561 {
12562 if (reg_t == 15)
12563 reg_t = ARM_PS_REGNUM;
12564
12565 record_buf[0] = reg_t;
12566 arm_insn_r->reg_rec_count = 1;
12567 }
12568 }
12569 else if (!bit_l && !bit_c)
12570 {
12571 /* Handle VMOV instruction. */
12572 if (bits_a == 0x00)
12573 {
12574 record_buf[0] = ARM_D0_REGNUM + reg_v;
12575
12576 arm_insn_r->reg_rec_count = 1;
12577 }
12578 /* Handle VMSR instruction. */
12579 else if (bits_a == 0x07)
12580 {
12581 record_buf[0] = ARM_FPSCR_REGNUM;
12582 arm_insn_r->reg_rec_count = 1;
12583 }
12584 }
12585 else if (!bit_l && bit_c)
12586 {
12587 /* Handle VMOV instruction. */
12588 if (!(bits_a & 0x04))
12589 {
12590 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
12591 + ARM_D0_REGNUM;
12592 arm_insn_r->reg_rec_count = 1;
12593 }
12594 /* Handle VDUP instruction. */
12595 else
12596 {
12597 if (bit (arm_insn_r->arm_insn, 21))
12598 {
12599 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12600 record_buf[0] = reg_v + ARM_D0_REGNUM;
12601 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
12602 arm_insn_r->reg_rec_count = 2;
12603 }
12604 else
12605 {
12606 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12607 record_buf[0] = reg_v + ARM_D0_REGNUM;
12608 arm_insn_r->reg_rec_count = 1;
12609 }
12610 }
12611 }
12612
12613 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12614 return 0;
12615}
12616
12617/* Record handler for extension register load/store instructions. */
12618
12619static int
12620arm_record_exreg_ld_st_insn (arm_insn_decode_record *arm_insn_r)
12621{
12622 uint32_t opcode, single_reg;
12623 uint8_t op_vldm_vstm;
12624 uint32_t record_buf[8], record_buf_mem[128];
12625 ULONGEST u_regval = 0;
12626
12627 struct regcache *reg_cache = arm_insn_r->regcache;
12628
12629 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12630 single_reg = !bit (arm_insn_r->arm_insn, 8);
12631 op_vldm_vstm = opcode & 0x1b;
12632
12633 /* Handle VMOV instructions. */
12634 if ((opcode & 0x1e) == 0x04)
12635 {
12636 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
12637 {
12638 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12639 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12640 arm_insn_r->reg_rec_count = 2;
12641 }
12642 else
12643 {
12644 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
12645 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
12646
12647 if (single_reg)
12648 {
12649 /* The first S register number m is REG_M:M (M is bit 5),
12650 the corresponding D register number is REG_M:M / 2, which
12651 is REG_M. */
12652 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
12653 /* The second S register number is REG_M:M + 1, the
12654 corresponding D register number is (REG_M:M + 1) / 2.
12655 IOW, if bit M is 1, the first and second S registers
12656 are mapped to different D registers, otherwise, they are
12657 in the same D register. */
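	  /* For example, with REG_M = 2 and M = 1 the pair is s5/s6, which
	     spans d2 and d3, so both D registers are recorded below.  */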
12658 if (bit_m)
12659 {
12660 record_buf[arm_insn_r->reg_rec_count++]
12661 = ARM_D0_REGNUM + reg_m + 1;
12662 }
12663 }
12664 else
12665 {
12666 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
12667 arm_insn_r->reg_rec_count = 1;
12668 }
12669 }
12670 }
12671 /* Handle VSTM and VPUSH instructions. */
12672 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12673 || op_vldm_vstm == 0x12)
12674 {
12675 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12676 uint32_t memory_index = 0;
12677
12678 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12679 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12680 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12681 imm_off32 = imm_off8 << 2;
12682 memory_count = imm_off8;
12683
12684 if (bit (arm_insn_r->arm_insn, 23))
12685 start_address = u_regval;
12686 else
12687 start_address = u_regval - imm_off32;
12688
12689 if (bit (arm_insn_r->arm_insn, 21))
12690 {
12691 record_buf[0] = reg_rn;
12692 arm_insn_r->reg_rec_count = 1;
12693 }
12694
12695 while (memory_count > 0)
12696 {
12697 if (single_reg)
12698 {
12699 record_buf_mem[memory_index] = 4;
12700 record_buf_mem[memory_index + 1] = start_address;
12701 start_address = start_address + 4;
12702 memory_index = memory_index + 2;
12703 }
12704 else
12705 {
12706 record_buf_mem[memory_index] = 4;
12707 record_buf_mem[memory_index + 1] = start_address;
12708 record_buf_mem[memory_index + 2] = 4;
12709 record_buf_mem[memory_index + 3] = start_address + 4;
12710 start_address = start_address + 8;
12711 memory_index = memory_index + 4;
12712 }
12713 memory_count--;
12714 }
12715 arm_insn_r->mem_rec_count = (memory_index >> 1);
12716 }
12717 /* Handle VLDM instructions. */
12718 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12719 || op_vldm_vstm == 0x13)
12720 {
12721 uint32_t reg_count, reg_vd;
12722 uint32_t reg_index = 0;
12723 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
12724
12725 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12726 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12727
12728 /* REG_VD is the first D register number. If the instruction
12729 loads memory to S registers (SINGLE_REG is TRUE), the register
12730 number is (REG_VD << 1 | bit D), so the corresponding D
12731 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
12732 if (!single_reg)
12733 reg_vd = reg_vd | (bit_d << 4);
12734
12735 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
12736 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12737
12738	  /* If the instruction loads memory into D registers, REG_COUNT should
12739	     be divided by 2, according to the ARM Architecture Reference
12740	     Manual.  If the instruction loads memory into S registers, divide by
12741	     2 as well because two S registers are mapped to one D register.  */
12742 reg_count = reg_count / 2;
12743 if (single_reg && bit_d)
12744 {
12745 /* Increase the register count if S register list starts from
12746 an odd number (bit d is one). */
12747 reg_count++;
12748 }
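      /* For example, VLDM r0, {s3-s6} has imm8 = 4, giving REG_COUNT = 2,
	 plus 1 because bit D is set; d1, d2 and d3 are then recorded.  */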
12749
12750 while (reg_count > 0)
12751 {
12752 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12753 reg_count--;
12754 }
12755 arm_insn_r->reg_rec_count = reg_index;
12756 }
12757 /* VSTR Vector store register. */
12758 else if ((opcode & 0x13) == 0x10)
12759 {
12760 uint32_t start_address, reg_rn, imm_off32, imm_off8;
12761 uint32_t memory_index = 0;
12762
12763 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12764 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12765 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12766 imm_off32 = imm_off8 << 2;
12767
12768 if (bit (arm_insn_r->arm_insn, 23))
12769 start_address = u_regval + imm_off32;
12770 else
12771 start_address = u_regval - imm_off32;
12772
12773 if (single_reg)
12774 {
12775 record_buf_mem[memory_index] = 4;
12776 record_buf_mem[memory_index + 1] = start_address;
12777 arm_insn_r->mem_rec_count = 1;
12778 }
12779 else
12780 {
12781 record_buf_mem[memory_index] = 4;
12782 record_buf_mem[memory_index + 1] = start_address;
12783 record_buf_mem[memory_index + 2] = 4;
12784 record_buf_mem[memory_index + 3] = start_address + 4;
12785 arm_insn_r->mem_rec_count = 2;
12786 }
12787 }
12788 /* VLDR Vector load register. */
12789 else if ((opcode & 0x13) == 0x11)
12790 {
12791 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12792
12793 if (!single_reg)
12794 {
12795 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12796 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12797 }
12798 else
12799 {
12800 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12801 /* Record register D rather than pseudo register S. */
12802 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
12803 }
12804 arm_insn_r->reg_rec_count = 1;
12805 }
12806
12807 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12808 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12809 return 0;
12810}
12811
12812/* Record handler for arm/thumb mode VFP data processing instructions. */
12813
12814static int
12815arm_record_vfp_data_proc_insn (arm_insn_decode_record *arm_insn_r)
12816{
12817 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12818 uint32_t record_buf[4];
12819 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12820 enum insn_types curr_insn_type = INSN_INV;
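  /* The types describe what the final switch below records: INSN_T0 records
     a pair of D registers (Vd and Vd + 1), INSN_T1 records one D register
     (double-precision destination), INSN_T2 records one register derived
     from the single-precision destination index, and INSN_T3 records only
     FPSCR (compare instructions).  */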
12821
12822 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12823 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12824 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12825 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12826 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12827 bit_d = bit (arm_insn_r->arm_insn, 22);
12828 /* Mask off the "D" bit. */
12829 opc1 = opc1 & ~0x04;
12830
12831 /* Handle VMLA, VMLS. */
12832 if (opc1 == 0x00)
12833 {
12834 if (bit (arm_insn_r->arm_insn, 10))
12835 {
12836 if (bit (arm_insn_r->arm_insn, 6))
12837 curr_insn_type = INSN_T0;
12838 else
12839 curr_insn_type = INSN_T1;
12840 }
12841 else
12842 {
12843 if (dp_op_sz)
12844 curr_insn_type = INSN_T1;
12845 else
12846 curr_insn_type = INSN_T2;
12847 }
12848 }
12849 /* Handle VNMLA, VNMLS, VNMUL. */
12850 else if (opc1 == 0x01)
12851 {
12852 if (dp_op_sz)
12853 curr_insn_type = INSN_T1;
12854 else
12855 curr_insn_type = INSN_T2;
12856 }
12857 /* Handle VMUL. */
12858 else if (opc1 == 0x02 && !(opc3 & 0x01))
12859 {
12860 if (bit (arm_insn_r->arm_insn, 10))
12861 {
12862 if (bit (arm_insn_r->arm_insn, 6))
12863 curr_insn_type = INSN_T0;
12864 else
12865 curr_insn_type = INSN_T1;
12866 }
12867 else
12868 {
12869 if (dp_op_sz)
12870 curr_insn_type = INSN_T1;
12871 else
12872 curr_insn_type = INSN_T2;
12873 }
12874 }
12875 /* Handle VADD, VSUB. */
12876 else if (opc1 == 0x03)
12877 {
12878 if (!bit (arm_insn_r->arm_insn, 9))
12879 {
12880 if (bit (arm_insn_r->arm_insn, 6))
12881 curr_insn_type = INSN_T0;
12882 else
12883 curr_insn_type = INSN_T1;
12884 }
12885 else
12886 {
12887 if (dp_op_sz)
12888 curr_insn_type = INSN_T1;
12889 else
12890 curr_insn_type = INSN_T2;
12891 }
12892 }
12893 /* Handle VDIV. */
12894 else if (opc1 == 0x08)
12895 {
12896 if (dp_op_sz)
12897 curr_insn_type = INSN_T1;
12898 else
12899 curr_insn_type = INSN_T2;
12900 }
12901 /* Handle all other vfp data processing instructions. */
12902 else if (opc1 == 0x0b)
12903 {
12904 /* Handle VMOV. */
12905 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12906 {
12907 if (bit (arm_insn_r->arm_insn, 4))
12908 {
12909 if (bit (arm_insn_r->arm_insn, 6))
12910 curr_insn_type = INSN_T0;
12911 else
12912 curr_insn_type = INSN_T1;
12913 }
12914 else
12915 {
12916 if (dp_op_sz)
12917 curr_insn_type = INSN_T1;
12918 else
12919 curr_insn_type = INSN_T2;
12920 }
12921 }
12922 /* Handle VNEG and VABS. */
12923 else if ((opc2 == 0x01 && opc3 == 0x01)
12924 || (opc2 == 0x00 && opc3 == 0x03))
12925 {
12926 if (!bit (arm_insn_r->arm_insn, 11))
12927 {
12928 if (bit (arm_insn_r->arm_insn, 6))
12929 curr_insn_type = INSN_T0;
12930 else
12931 curr_insn_type = INSN_T1;
12932 }
12933 else
12934 {
12935 if (dp_op_sz)
12936 curr_insn_type = INSN_T1;
12937 else
12938 curr_insn_type = INSN_T2;
12939 }
12940 }
12941 /* Handle VSQRT. */
12942 else if (opc2 == 0x01 && opc3 == 0x03)
12943 {
12944 if (dp_op_sz)
12945 curr_insn_type = INSN_T1;
12946 else
12947 curr_insn_type = INSN_T2;
12948 }
12949 /* Handle VCVT. */
12950 else if (opc2 == 0x07 && opc3 == 0x03)
12951 {
12952 if (!dp_op_sz)
12953 curr_insn_type = INSN_T1;
12954 else
12955 curr_insn_type = INSN_T2;
12956 }
12957 else if (opc3 & 0x01)
12958 {
12959 /* Handle VCVT. */
12960 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12961 {
12962 if (!bit (arm_insn_r->arm_insn, 18))
12963 curr_insn_type = INSN_T2;
12964 else
12965 {
12966 if (dp_op_sz)
12967 curr_insn_type = INSN_T1;
12968 else
12969 curr_insn_type = INSN_T2;
12970 }
12971 }
12972 /* Handle VCVT. */
12973 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12974 {
12975 if (dp_op_sz)
12976 curr_insn_type = INSN_T1;
12977 else
12978 curr_insn_type = INSN_T2;
12979 }
12980 /* Handle VCVTB, VCVTT. */
12981 else if ((opc2 & 0x0e) == 0x02)
12982 curr_insn_type = INSN_T2;
12983 /* Handle VCMP, VCMPE. */
12984 else if ((opc2 & 0x0e) == 0x04)
12985 curr_insn_type = INSN_T3;
12986 }
12987 }
12988
12989 switch (curr_insn_type)
12990 {
12991 case INSN_T0:
12992 reg_vd = reg_vd | (bit_d << 4);
12993 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12994 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12995 arm_insn_r->reg_rec_count = 2;
12996 break;
12997
12998 case INSN_T1:
12999 reg_vd = reg_vd | (bit_d << 4);
13000 record_buf[0] = reg_vd + ARM_D0_REGNUM;
13001 arm_insn_r->reg_rec_count = 1;
13002 break;
13003
13004 case INSN_T2:
13005 reg_vd = (reg_vd << 1) | bit_d;
13006 record_buf[0] = reg_vd + ARM_D0_REGNUM;
13007 arm_insn_r->reg_rec_count = 1;
13008 break;
13009
13010 case INSN_T3:
13011 record_buf[0] = ARM_FPSCR_REGNUM;
13012 arm_insn_r->reg_rec_count = 1;
13013 break;
13014
13015 default:
13016 gdb_assert_not_reached ("no decoding pattern found");
13017 break;
13018 }
13019
13020 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
13021 return 0;
13022}
13023
13024/* Handling opcode 110 insns. */
13025
13026static int
13027arm_record_asimd_vfp_coproc (arm_insn_decode_record *arm_insn_r)
13028{
13029 uint32_t op1, op1_ebit, coproc;
13030
13031 coproc = bits (arm_insn_r->arm_insn, 8, 11);
13032 op1 = bits (arm_insn_r->arm_insn, 20, 25);
13033 op1_ebit = bit (arm_insn_r->arm_insn, 20);
13034
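  /* Coprocessor numbers 10 and 11 select the VFP/Advanced SIMD register
     file.  */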
13035 if ((coproc & 0x0e) == 0x0a)
13036 {
13037 /* Handle extension register ld/st instructions. */
13038 if (!(op1 & 0x20))
13039 return arm_record_exreg_ld_st_insn (arm_insn_r);
13040
13041 /* 64-bit transfers between arm core and extension registers. */
13042 if ((op1 & 0x3e) == 0x04)
13043 return arm_record_exreg_ld_st_insn (arm_insn_r);
13044 }
13045 else
13046 {
13047 /* Handle coprocessor ld/st instructions. */
13048 if (!(op1 & 0x3a))
13049 {
13050 /* Store. */
13051 if (!op1_ebit)
13052 return arm_record_unsupported_insn (arm_insn_r);
13053 else
13054 /* Load. */
13055 return arm_record_unsupported_insn (arm_insn_r);
13056 }
13057
13058 /* Move to coprocessor from two arm core registers. */
13059 if (op1 == 0x4)
13060 return arm_record_unsupported_insn (arm_insn_r);
13061
13062 /* Move to two arm core registers from coprocessor. */
13063 if (op1 == 0x5)
13064 {
13065 uint32_t reg_t[2];
13066
13067 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
13068 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
13069 arm_insn_r->reg_rec_count = 2;
13070
13071 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
13072 return 0;
13073 }
13074 }
13075 return arm_record_unsupported_insn (arm_insn_r);
13076}
13077
13078/* Handling opcode 111 insns. */
13079
13080static int
13081arm_record_coproc_data_proc (arm_insn_decode_record *arm_insn_r)
13082{
13083 uint32_t op, op1_ebit, coproc, bits_24_25;
13084 arm_gdbarch_tdep *tdep
13085 = gdbarch_tdep<arm_gdbarch_tdep> (arm_insn_r->gdbarch);
13086 struct regcache *reg_cache = arm_insn_r->regcache;
13087
13088 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
13089 coproc = bits (arm_insn_r->arm_insn, 8, 11);
13090 op1_ebit = bit (arm_insn_r->arm_insn, 20);
13091 op = bit (arm_insn_r->arm_insn, 4);
13092 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
13093
13094 /* Handle arm SWI/SVC system call instructions. */
13095 if (bits_24_25 == 0x3)
13096 {
13097 if (tdep->arm_syscall_record != NULL)
13098 {
13099 ULONGEST svc_operand, svc_number;
13100
13101 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
13102
13103 if (svc_operand) /* OABI. */
13104 svc_number = svc_operand - 0x900000;
13105 else /* EABI. */
13106 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
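	  /* For example, the OABI form "swi 0x900004" encodes syscall
	     number 4 in the immediate, while the EABI form "swi 0" passes
	     the number in r7.  */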
13107
13108 return tdep->arm_syscall_record (reg_cache, svc_number);
13109 }
13110 else
13111 {
13112 gdb_printf (gdb_stderr, _("no syscall record support\n"));
13113 return -1;
13114 }
13115 }
13116 else if (bits_24_25 == 0x02)
13117 {
13118 if (op)
13119 {
13120 if ((coproc & 0x0e) == 0x0a)
13121 {
13122 /* 8, 16, and 32-bit transfer */
13123 return arm_record_vdata_transfer_insn (arm_insn_r);
13124 }
13125 else
13126 {
13127 if (op1_ebit)
13128 {
13129 /* MRC, MRC2 */
13130 uint32_t record_buf[1];
13131
13132 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
13133 if (record_buf[0] == 15)
13134 record_buf[0] = ARM_PS_REGNUM;
13135
13136 arm_insn_r->reg_rec_count = 1;
13137 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
13138 record_buf);
13139 return 0;
13140 }
13141 else
13142 {
13143 /* MCR, MCR2 */
13144 return -1;
13145 }
13146 }
13147 }
13148 else
13149 {
13150 if ((coproc & 0x0e) == 0x0a)
13151 {
13152 /* VFP data-processing instructions. */
13153 return arm_record_vfp_data_proc_insn (arm_insn_r);
13154 }
13155 else
13156 {
13157 /* CDP, CDP2 */
13158 return -1;
13159 }
13160 }
13161 }
13162 else
13163 {
13164 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
13165
13166 if (op1 == 5)
13167 {
13168 if ((coproc & 0x0e) != 0x0a)
13169 {
13170 /* MRRC, MRRC2 */
13171 return -1;
13172 }
13173 }
13174 else if (op1 == 4 || op1 == 5)
13175 {
13176 if ((coproc & 0x0e) == 0x0a)
13177 {
13178		  /* 64-bit transfers between ARM core and extension registers.  */
13179 return -1;
13180 }
13181 else if (op1 == 4)
13182 {
13183 /* MCRR, MCRR2 */
13184 return -1;
13185 }
13186 }
13187 else if (op1 == 0 || op1 == 1)
13188 {
13189 /* UNDEFINED */
13190 return -1;
13191 }
13192 else
13193 {
13194 if ((coproc & 0x0e) == 0x0a)
13195 {
13196 /* Extension register load/store */
13197 }
13198 else
13199 {
13200 /* STC, STC2, LDC, LDC2 */
13201 }
13202 return -1;
13203 }
13204 }
13205
13206 return -1;
13207}
13208
13209/* Handling opcode 000 insns. */
13210
13211static int
13212thumb_record_shift_add_sub (arm_insn_decode_record *thumb_insn_r)
13213{
13214 uint32_t record_buf[8];
13215 uint32_t reg_src1 = 0;
13216
13217 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13218
13219 record_buf[0] = ARM_PS_REGNUM;
13220 record_buf[1] = reg_src1;
13221 thumb_insn_r->reg_rec_count = 2;
13222
13223 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13224
13225 return 0;
13226}
13227
13228
13229/* Handling opcode 001 insns. */
13230
13231static int
13232thumb_record_add_sub_cmp_mov (arm_insn_decode_record *thumb_insn_r)
13233{
13234 uint32_t record_buf[8];
13235 uint32_t reg_src1 = 0;
13236
13237 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13238
13239 record_buf[0] = ARM_PS_REGNUM;
13240 record_buf[1] = reg_src1;
13241 thumb_insn_r->reg_rec_count = 2;
13242
13243 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13244
13245 return 0;
13246}
13247
13248/* Handling opcode 010 insns. */
13249
13250static int
13251thumb_record_ld_st_reg_offset (arm_insn_decode_record *thumb_insn_r)
13252{
13253 struct regcache *reg_cache = thumb_insn_r->regcache;
13254 uint32_t record_buf[8], record_buf_mem[8];
13255
13256 uint32_t reg_src1 = 0, reg_src2 = 0;
13257 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
13258
13259 ULONGEST u_regval[2] = {0};
13260
13261 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
13262
13263 if (bit (thumb_insn_r->arm_insn, 12))
13264 {
13265 /* Handle load/store register offset. */
13266 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
13267
13268 if (in_inclusive_range (opB, 4U, 7U))
13269 {
13270	      /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH.  */
13271	      reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13272 record_buf[0] = reg_src1;
13273 thumb_insn_r->reg_rec_count = 1;
13274 }
13275 else if (in_inclusive_range (opB, 0U, 2U))
13276 {
13277	      /* STR(2), STRB(2), STRH(2).  */
13278 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13279 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
13280 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
13281 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
13282 if (0 == opB)
13283 record_buf_mem[0] = 4; /* STR (2). */
13284 else if (2 == opB)
13285 record_buf_mem[0] = 1; /* STRB (2). */
13286 else if (1 == opB)
13287 record_buf_mem[0] = 2; /* STRH (2). */
13288 record_buf_mem[1] = u_regval[0] + u_regval[1];
13289 thumb_insn_r->mem_rec_count = 1;
13290 }
13291 }
13292 else if (bit (thumb_insn_r->arm_insn, 11))
13293 {
13294 /* Handle load from literal pool. */
13295 /* LDR(3). */
13296 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13297 record_buf[0] = reg_src1;
13298 thumb_insn_r->reg_rec_count = 1;
13299 }
13300 else if (opcode1)
13301 {
13302 /* Special data instructions and branch and exchange */
13303 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
13304 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
13305 if ((3 == opcode2) && (!opcode3))
13306 {
13307 /* Branch with exchange. */
13308 record_buf[0] = ARM_PS_REGNUM;
13309 thumb_insn_r->reg_rec_count = 1;
13310 }
13311 else
13312 {
13313 /* Format 8; special data processing insns. */
13314 record_buf[0] = ARM_PS_REGNUM;
13315 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
13316 | bits (thumb_insn_r->arm_insn, 0, 2));
13317 thumb_insn_r->reg_rec_count = 2;
13318 }
13319 }
13320 else
13321 {
13322 /* Format 5; data processing insns. */
13323 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13324 if (bit (thumb_insn_r->arm_insn, 7))
13325 {
13326 reg_src1 = reg_src1 + 8;
13327 }
13328 record_buf[0] = ARM_PS_REGNUM;
13329 record_buf[1] = reg_src1;
13330 thumb_insn_r->reg_rec_count = 2;
13331 }
13332
13333 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13334 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13335 record_buf_mem);
13336
13337 return 0;
13338}
13339
13340/* Handling opcode 011 insns.  */
13341
13342static int
13343thumb_record_ld_st_imm_offset (arm_insn_decode_record *thumb_insn_r)
13344{
13345 struct regcache *reg_cache = thumb_insn_r->regcache;
13346 uint32_t record_buf[8], record_buf_mem[8];
13347
13348 uint32_t reg_src1 = 0;
13349 uint32_t opcode = 0, immed_5 = 0;
13350
13351 ULONGEST u_regval = 0;
13352
13353 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13354
13355 if (opcode)
13356 {
13357 /* LDR(1). */
13358 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13359 record_buf[0] = reg_src1;
13360 thumb_insn_r->reg_rec_count = 1;
13361 }
13362 else
13363 {
13364 /* STR(1). */
13365 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13366 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
13367 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13368 record_buf_mem[0] = 4;
13369 record_buf_mem[1] = u_regval + (immed_5 * 4);
13370 thumb_insn_r->mem_rec_count = 1;
13371 }
13372
13373 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13374 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13375 record_buf_mem);
13376
13377 return 0;
13378}
13379
13380/* Handling opcode 100 insns. */
13381
13382static int
13383thumb_record_ld_st_stack (arm_insn_decode_record *thumb_insn_r)
13384{
13385 struct regcache *reg_cache = thumb_insn_r->regcache;
13386 uint32_t record_buf[8], record_buf_mem[8];
13387
13388 uint32_t reg_src1 = 0;
13389 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
13390
13391 ULONGEST u_regval = 0;
13392
13393 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13394
13395 if (3 == opcode)
13396 {
13397 /* LDR(4). */
13398 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13399 record_buf[0] = reg_src1;
13400 thumb_insn_r->reg_rec_count = 1;
13401 }
13402 else if (1 == opcode)
13403 {
13404 /* LDRH(1). */
13405 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13406 record_buf[0] = reg_src1;
13407 thumb_insn_r->reg_rec_count = 1;
13408 }
13409 else if (2 == opcode)
13410 {
13411 /* STR(3). */
13412 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
13413 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13414 record_buf_mem[0] = 4;
13415 record_buf_mem[1] = u_regval + (immed_8 * 4);
13416 thumb_insn_r->mem_rec_count = 1;
13417 }
13418 else if (0 == opcode)
13419 {
13420 /* STRH(1). */
13421 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
13422 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13423 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13424 record_buf_mem[0] = 2;
13425 record_buf_mem[1] = u_regval + (immed_5 * 2);
13426 thumb_insn_r->mem_rec_count = 1;
13427 }
13428
13429 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13430 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13431 record_buf_mem);
13432
13433 return 0;
13434}
13435
13436/* Handling opcode 101 insns. */
13437
13438static int
13439thumb_record_misc (arm_insn_decode_record *thumb_insn_r)
13440{
13441 struct regcache *reg_cache = thumb_insn_r->regcache;
13442
13443 uint32_t opcode = 0;
13444 uint32_t register_bits = 0, register_count = 0;
13445 uint32_t index = 0, start_address = 0;
13446 uint32_t record_buf[24], record_buf_mem[48];
13447 uint32_t reg_src1;
13448
13449 ULONGEST u_regval = 0;
13450
13451 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13452
13453 if (opcode == 0 || opcode == 1)
13454 {
13455 /* ADR and ADD (SP plus immediate) */
13456
13457 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13458 record_buf[0] = reg_src1;
13459 thumb_insn_r->reg_rec_count = 1;
13460 }
13461 else
13462 {
13463 /* Miscellaneous 16-bit instructions */
13464 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
13465
13466 switch (opcode2)
13467 {
13468 case 6:
13469 /* SETEND and CPS */
13470 break;
13471 case 0:
13472 /* ADD/SUB (SP plus immediate) */
13473 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13474 record_buf[0] = ARM_SP_REGNUM;
13475 thumb_insn_r->reg_rec_count = 1;
13476 break;
13477 case 1: /* fall through */
13478 case 3: /* fall through */
13479 case 9: /* fall through */
13480 case 11:
13481 /* CBNZ, CBZ */
13482 break;
13483 case 2:
13484 /* SXTH, SXTB, UXTH, UXTB */
13485 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13486 thumb_insn_r->reg_rec_count = 1;
13487 break;
13488 case 4: /* fall through */
13489 case 5:
13490 /* PUSH. */
13491 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13492 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13493 while (register_bits)
13494 {
13495 if (register_bits & 0x00000001)
13496 register_count++;
13497 register_bits = register_bits >> 1;
13498 }
13499	      start_address = u_regval
13500		- (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
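	  /* For example, PUSH {r0, r1, lr} with SP = 0x1000 stores three
	     4-byte words, so the first (lowest) address recorded is 0xff4.  */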
13501 thumb_insn_r->mem_rec_count = register_count;
13502 while (register_count)
13503 {
13504 record_buf_mem[(register_count * 2) - 1] = start_address;
13505 record_buf_mem[(register_count * 2) - 2] = 4;
13506 start_address = start_address + 4;
13507 register_count--;
13508 }
13509 record_buf[0] = ARM_SP_REGNUM;
13510 thumb_insn_r->reg_rec_count = 1;
13511 break;
13512 case 10:
13513 /* REV, REV16, REVSH */
13514 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13515 thumb_insn_r->reg_rec_count = 1;
13516 break;
13517 case 12: /* fall through */
13518 case 13:
13519 /* POP. */
13520 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13521 while (register_bits)
13522 {
13523 if (register_bits & 0x00000001)
13524 record_buf[index++] = register_count;
13525 register_bits = register_bits >> 1;
13526 register_count++;
13527 }
13528 record_buf[index++] = ARM_PS_REGNUM;
13529 record_buf[index++] = ARM_SP_REGNUM;
13530 thumb_insn_r->reg_rec_count = index;
13531 break;
13532 case 0xe:
13533 /* BKPT insn. */
13534 /* Handle enhanced software breakpoint insn, BKPT. */
13535	  /* CPSR is changed so that execution continues in ARM state, with
13536	     normal interrupts disabled, entering abort mode.  */
13537	  /* The PC is set according to the high vector configuration.  */
13538	  /* If the user hits the breakpoint and then types reverse, we need
13539	     to go back to the previous CPSR and program counter.  */
13540 record_buf[0] = ARM_PS_REGNUM;
13541 record_buf[1] = ARM_LR_REGNUM;
13542 thumb_insn_r->reg_rec_count = 2;
13543 /* We need to save SPSR value, which is not yet done. */
13544 gdb_printf (gdb_stderr,
13545 _("Process record does not support instruction "
13546 "0x%0x at address %s.\n"),
13547 thumb_insn_r->arm_insn,
13548 paddress (thumb_insn_r->gdbarch,
13549 thumb_insn_r->this_addr));
13550 return -1;
13551
13552 case 0xf:
13553 /* If-Then, and hints */
13554 break;
13555 default:
13556 return -1;
13557 };
13558 }
13559
13560 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13561 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13562 record_buf_mem);
13563
13564 return 0;
13565}
13566
13567/* Handling opcode 110 insns. */
13568
13569static int
13570thumb_record_ldm_stm_swi (arm_insn_decode_record *thumb_insn_r)
13571{
13572 arm_gdbarch_tdep *tdep
13573 = gdbarch_tdep<arm_gdbarch_tdep> (thumb_insn_r->gdbarch);
13574 struct regcache *reg_cache = thumb_insn_r->regcache;
13575
13576  uint32_t ret = 0;        /* Function return value: -1: record failure; 0: success.  */
13577 uint32_t reg_src1 = 0;
13578 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
13579 uint32_t index = 0, start_address = 0;
13580 uint32_t record_buf[24], record_buf_mem[48];
13581
13582 ULONGEST u_regval = 0;
13583
13584 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
13585 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
13586
13587 if (1 == opcode2)
13588 {
13589
13590 /* LDMIA. */
13591 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13592 /* Get Rn. */
13593 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13594 while (register_bits)
13595 {
13596 if (register_bits & 0x00000001)
13597 record_buf[index++] = register_count;
13598 register_bits = register_bits >> 1;
13599 register_count++;
13600 }
13601 record_buf[index++] = reg_src1;
13602 thumb_insn_r->reg_rec_count = index;
13603 }
13604 else if (0 == opcode2)
13605 {
13606      /* Handle STMIA.  */
13607 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13608 /* Get Rn. */
13609 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13610 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13611 while (register_bits)
13612 {
13613 if (register_bits & 0x00000001)
13614 register_count++;
13615 register_bits = register_bits >> 1;
13616 }
13617 start_address = u_regval;
13618 thumb_insn_r->mem_rec_count = register_count;
13619 while (register_count)
13620 {
13621 record_buf_mem[(register_count * 2) - 1] = start_address;
13622 record_buf_mem[(register_count * 2) - 2] = 4;
13623 start_address = start_address + 4;
13624 register_count--;
13625 }
13626 }
13627 else if (0x1F == opcode1)
13628 {
13629 /* Handle arm syscall insn. */
13630 if (tdep->arm_syscall_record != NULL)
13631 {
13632 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
13633 ret = tdep->arm_syscall_record (reg_cache, u_regval);
13634 }
13635 else
13636 {
13637 gdb_printf (gdb_stderr, _("no syscall record support\n"));
13638 return -1;
13639 }
13640 }
13641
13642  /* B (1), the conditional branch, is automatically taken care of in
13643     process_record, as the PC is saved there.  */
13644
13645 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13646 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13647 record_buf_mem);
13648
13649 return ret;
13650}
13651
13652/* Handling opcode 111 insns. */
13653
13654static int
13655thumb_record_branch (arm_insn_decode_record *thumb_insn_r)
13656{
13657 uint32_t record_buf[8];
13658 uint32_t bits_h = 0;
13659
13660 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
13661
13662 if (2 == bits_h || 3 == bits_h)
13663 {
13664 /* BL */
13665 record_buf[0] = ARM_LR_REGNUM;
13666 thumb_insn_r->reg_rec_count = 1;
13667 }
13668 else if (1 == bits_h)
13669 {
13670 /* BLX(1). */
13671 record_buf[0] = ARM_PS_REGNUM;
13672 record_buf[1] = ARM_LR_REGNUM;
13673 thumb_insn_r->reg_rec_count = 2;
13674 }
13675
13676  /* B (2) is automatically taken care of in process_record, as the PC
13677     is saved there.  */
13678
13679 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13680
13681 return 0;
13682}
13683
13684/* Handler for thumb2 load/store multiple instructions. */
13685
13686static int
13687thumb2_record_ld_st_multiple (arm_insn_decode_record *thumb2_insn_r)
13688{
13689 struct regcache *reg_cache = thumb2_insn_r->regcache;
13690
13691 uint32_t reg_rn, op;
13692 uint32_t register_bits = 0, register_count = 0;
13693 uint32_t index = 0, start_address = 0;
13694 uint32_t record_buf[24], record_buf_mem[48];
13695
13696 ULONGEST u_regval = 0;
13697
13698 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13699 op = bits (thumb2_insn_r->arm_insn, 23, 24);
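  /* OP selects the group handled below: values 0 and 3 are RFE/SRS, 1 is
     the increment-after (IA) form and 2 is the decrement-before (DB)
     form.  */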
13700
13701 if (0 == op || 3 == op)
13702 {
13703 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13704 {
13705 /* Handle RFE instruction. */
13706 record_buf[0] = ARM_PS_REGNUM;
13707 thumb2_insn_r->reg_rec_count = 1;
13708 }
13709 else
13710 {
13711 /* Handle SRS instruction after reading banked SP. */
13712 return arm_record_unsupported_insn (thumb2_insn_r);
13713 }
13714 }
13715 else if (1 == op || 2 == op)
13716 {
13717 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13718 {
13719 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13720 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13721 while (register_bits)
13722 {
13723 if (register_bits & 0x00000001)
13724 record_buf[index++] = register_count;
13725
13726 register_count++;
13727 register_bits = register_bits >> 1;
13728 }
13729 record_buf[index++] = reg_rn;
13730 record_buf[index++] = ARM_PS_REGNUM;
13731 thumb2_insn_r->reg_rec_count = index;
13732 }
13733 else
13734 {
13735 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13736 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13737 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13738 while (register_bits)
13739 {
13740 if (register_bits & 0x00000001)
13741 register_count++;
13742
13743 register_bits = register_bits >> 1;
13744 }
13745
13746 if (1 == op)
13747 {
13748	      /* Start address calculation for STM/STMIA/STMEA (increment after).  */
13749 start_address = u_regval;
13750 }
13751 else if (2 == op)
13752 {
13753	      /* Start address calculation for STMDB/STMFD (decrement before).  */
13754 start_address = u_regval - register_count * 4;
13755 }
13756
13757 thumb2_insn_r->mem_rec_count = register_count;
13758 while (register_count)
13759 {
13760 record_buf_mem[register_count * 2 - 1] = start_address;
13761 record_buf_mem[register_count * 2 - 2] = 4;
13762 start_address = start_address + 4;
13763 register_count--;
13764 }
13765 record_buf[0] = reg_rn;
13766 record_buf[1] = ARM_PS_REGNUM;
13767 thumb2_insn_r->reg_rec_count = 2;
13768 }
13769 }
13770
13771 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13772 record_buf_mem);
13773 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13774 record_buf);
13775 return ARM_RECORD_SUCCESS;
13776}
13777
13778/* Handler for thumb2 load/store (dual/exclusive) and table branch
13779 instructions. */
13780
13781static int
13782thumb2_record_ld_st_dual_ex_tbb (arm_insn_decode_record *thumb2_insn_r)
13783{
13784 struct regcache *reg_cache = thumb2_insn_r->regcache;
13785
13786 uint32_t reg_rd, reg_rn, offset_imm;
13787 uint32_t reg_dest1, reg_dest2;
13788 uint32_t address, offset_addr;
13789 uint32_t record_buf[8], record_buf_mem[8];
13790 uint32_t op1, op2, op3;
13791
13792 ULONGEST u_regval[2];
13793
13794 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13795 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13796 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
13797
13798 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13799 {
13800    if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13801 {
13802 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13803 record_buf[0] = reg_dest1;
13804 record_buf[1] = ARM_PS_REGNUM;
13805 thumb2_insn_r->reg_rec_count = 2;
13806 }
13807
13808 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13809 {
13810 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13811 record_buf[2] = reg_dest2;
13812 thumb2_insn_r->reg_rec_count = 3;
13813 }
13814 }
13815 else
13816 {
13817 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13818 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13819
13820 if (0 == op1 && 0 == op2)
13821 {
13822 /* Handle STREX. */
13823 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13824 address = u_regval[0] + (offset_imm * 4);
13825 record_buf_mem[0] = 4;
13826 record_buf_mem[1] = address;
13827 thumb2_insn_r->mem_rec_count = 1;
13828 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13829 record_buf[0] = reg_rd;
13830 thumb2_insn_r->reg_rec_count = 1;
13831 }
13832 else if (1 == op1 && 0 == op2)
13833 {
13834 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13835 record_buf[0] = reg_rd;
13836 thumb2_insn_r->reg_rec_count = 1;
13837 address = u_regval[0];
13838 record_buf_mem[1] = address;
13839
13840 if (4 == op3)
13841 {
13842 /* Handle STREXB. */
13843 record_buf_mem[0] = 1;
13844 thumb2_insn_r->mem_rec_count = 1;
13845 }
13846 else if (5 == op3)
13847 {
13848 /* Handle STREXH. */
13849	      record_buf_mem[0] = 2;
13850 thumb2_insn_r->mem_rec_count = 1;
13851 }
13852 else if (7 == op3)
13853 {
13854 /* Handle STREXD. */
13855 address = u_regval[0];
13856 record_buf_mem[0] = 4;
13857 record_buf_mem[2] = 4;
13858 record_buf_mem[3] = address + 4;
13859 thumb2_insn_r->mem_rec_count = 2;
13860 }
13861 }
13862 else
13863 {
13864 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13865
13866 if (bit (thumb2_insn_r->arm_insn, 24))
13867 {
13868 if (bit (thumb2_insn_r->arm_insn, 23))
13869 offset_addr = u_regval[0] + (offset_imm * 4);
13870 else
13871 offset_addr = u_regval[0] - (offset_imm * 4);
13872
13873 address = offset_addr;
13874 }
13875 else
13876 address = u_regval[0];
13877
13878 record_buf_mem[0] = 4;
13879 record_buf_mem[1] = address;
13880 record_buf_mem[2] = 4;
13881 record_buf_mem[3] = address + 4;
13882 thumb2_insn_r->mem_rec_count = 2;
13883 record_buf[0] = reg_rn;
13884 thumb2_insn_r->reg_rec_count = 1;
13885 }
13886 }
13887
13888 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13889 record_buf);
13890 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13891 record_buf_mem);
13892 return ARM_RECORD_SUCCESS;
13893}
13894
13895/* Handler for thumb2 data processing (shift register and modified immediate)
13896 instructions. */
13897
13898static int
13899thumb2_record_data_proc_sreg_mimm (arm_insn_decode_record *thumb2_insn_r)
13900{
13901 uint32_t reg_rd, op;
13902 uint32_t record_buf[8];
13903
13904 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13905 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13906
13907 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13908 {
13909 record_buf[0] = ARM_PS_REGNUM;
13910 thumb2_insn_r->reg_rec_count = 1;
13911 }
13912 else
13913 {
13914 record_buf[0] = reg_rd;
13915 record_buf[1] = ARM_PS_REGNUM;
13916 thumb2_insn_r->reg_rec_count = 2;
13917 }
13918
13919 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13920 record_buf);
13921 return ARM_RECORD_SUCCESS;
13922}
13923
13924/* Generic handler for thumb2 instructions which affect the destination and
13925   PS registers.  */
13926
13927static int
13928thumb2_record_ps_dest_generic (arm_insn_decode_record *thumb2_insn_r)
13929{
13930 uint32_t reg_rd;
13931 uint32_t record_buf[8];
13932
13933 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13934
13935 record_buf[0] = reg_rd;
13936 record_buf[1] = ARM_PS_REGNUM;
13937 thumb2_insn_r->reg_rec_count = 2;
13938
13939 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13940 record_buf);
13941 return ARM_RECORD_SUCCESS;
13942}
13943
13944/* Handler for thumb2 branch and miscellaneous control instructions. */
13945
13946static int
13947thumb2_record_branch_misc_cntrl (arm_insn_decode_record *thumb2_insn_r)
13948{
13949 uint32_t op, op1, op2;
13950 uint32_t record_buf[8];
13951
13952 op = bits (thumb2_insn_r->arm_insn, 20, 26);
13953 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
13954 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13955
13956 /* Handle MSR insn. */
13957 if (!(op1 & 0x2) && 0x38 == op)
13958 {
13959 if (!(op2 & 0x3))
13960 {
13961 /* CPSR is going to be changed. */
13962 record_buf[0] = ARM_PS_REGNUM;
13963 thumb2_insn_r->reg_rec_count = 1;
13964 }
13965 else
13966 {
13967	  arm_record_unsupported_insn (thumb2_insn_r);
13968 return -1;
13969 }
13970 }
13971 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
13972 {
13973 /* BLX. */
13974 record_buf[0] = ARM_PS_REGNUM;
13975 record_buf[1] = ARM_LR_REGNUM;
13976 thumb2_insn_r->reg_rec_count = 2;
13977 }
13978
13979 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13980 record_buf);
13981 return ARM_RECORD_SUCCESS;
13982}
13983
13984/* Handler for thumb2 store single data item instructions. */
13985
13986static int
13987thumb2_record_str_single_data (arm_insn_decode_record *thumb2_insn_r)
13988{
13989 struct regcache *reg_cache = thumb2_insn_r->regcache;
13990
13991 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
13992 uint32_t address, offset_addr;
13993 uint32_t record_buf[8], record_buf_mem[8];
13994 uint32_t op1, op2;
13995
13996 ULONGEST u_regval[2];
13997
13998 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
13999 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
14000 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14001 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
14002
14003 if (bit (thumb2_insn_r->arm_insn, 23))
14004 {
14005 /* T2 encoding. */
14006 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
14007 offset_addr = u_regval[0] + offset_imm;
14008 address = offset_addr;
14009 }
14010 else
14011 {
14012 /* T3 encoding. */
14013 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
14014 {
14015 /* Handle STRB (register). */
14016 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
14017 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
14018 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
14019 offset_addr = u_regval[1] << shift_imm;
14020 address = u_regval[0] + offset_addr;
14021 }
14022 else
14023 {
14024 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
14025 if (bit (thumb2_insn_r->arm_insn, 10))
14026 {
14027 if (bit (thumb2_insn_r->arm_insn, 9))
14028 offset_addr = u_regval[0] + offset_imm;
14029 else
14030 offset_addr = u_regval[0] - offset_imm;
14031
14032 address = offset_addr;
14033 }
14034 else
14035 address = u_regval[0];
14036 }
14037 }
14038
14039 switch (op1)
14040 {
14041 /* Store byte instructions. */
14042 case 4:
14043 case 0:
14044 record_buf_mem[0] = 1;
14045 break;
14046 /* Store half word instructions. */
14047 case 1:
14048 case 5:
14049 record_buf_mem[0] = 2;
14050 break;
14051 /* Store word instructions. */
14052 case 2:
14053 case 6:
14054 record_buf_mem[0] = 4;
14055 break;
14056
14057 default:
14058 gdb_assert_not_reached ("no decoding pattern found");
14059 break;
14060 }
14061
14062 record_buf_mem[1] = address;
14063 thumb2_insn_r->mem_rec_count = 1;
14064 record_buf[0] = reg_rn;
14065 thumb2_insn_r->reg_rec_count = 1;
14066
14067 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14068 record_buf);
14069 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
14070 record_buf_mem);
14071 return ARM_RECORD_SUCCESS;
14072}
14073
14074/* Handler for thumb2 load memory hints instructions. */
14075
14076static int
14077thumb2_record_ld_mem_hints (arm_insn_decode_record *thumb2_insn_r)
14078{
14079 uint32_t record_buf[8];
14080 uint32_t reg_rt, reg_rn;
14081
14082 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
14083 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14084
14085 if (ARM_PC_REGNUM != reg_rt)
14086 {
14087 record_buf[0] = reg_rt;
14088 record_buf[1] = reg_rn;
14089 record_buf[2] = ARM_PS_REGNUM;
14090 thumb2_insn_r->reg_rec_count = 3;
14091
14092 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14093 record_buf);
14094 return ARM_RECORD_SUCCESS;
14095 }
14096
14097 return ARM_RECORD_FAILURE;
14098}
14099
14100/* Handler for thumb2 load word instructions. */
14101
14102static int
14103thumb2_record_ld_word (arm_insn_decode_record *thumb2_insn_r)
14104{
14105 uint32_t record_buf[8];
14106
14107 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
14108 record_buf[1] = ARM_PS_REGNUM;
14109 thumb2_insn_r->reg_rec_count = 2;
14110
14111 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14112 record_buf);
14113 return ARM_RECORD_SUCCESS;
14114}
14115
14116/* Handler for thumb2 long multiply, long multiply accumulate, and
14117 divide instructions. */
14118
14119static int
14120thumb2_record_lmul_lmla_div (arm_insn_decode_record *thumb2_insn_r)
14121{
14122 uint32_t opcode1 = 0, opcode2 = 0;
14123 uint32_t record_buf[8];
14124
14125 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
14126 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
14127
14128 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
14129 {
14130      /* Handle the long multiply and long multiply accumulate
14131	 instructions: SMULL, UMULL, SMLAL, UMLAL and their variants.  */
14132 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
14133 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
14134 record_buf[2] = ARM_PS_REGNUM;
14135 thumb2_insn_r->reg_rec_count = 3;
14136 }
14137 else if (1 == opcode1 || 3 == opcode2)
14138 {
14139 /* Handle SDIV and UDIV. */
14140 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
14141 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
14142 record_buf[2] = ARM_PS_REGNUM;
14143 thumb2_insn_r->reg_rec_count = 3;
14144 }
14145 else
14146 return ARM_RECORD_FAILURE;
14147
14148 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14149 record_buf);
14150 return ARM_RECORD_SUCCESS;
14151}
14152
14153/* Record handler for thumb32 coprocessor instructions. */
14154
14155static int
14156thumb2_record_coproc_insn (arm_insn_decode_record *thumb2_insn_r)
14157{
14158 if (bit (thumb2_insn_r->arm_insn, 25))
14159 return arm_record_coproc_data_proc (thumb2_insn_r);
14160 else
14161 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
14162}
14163
14164/* Record handler for Advanced SIMD structure load/store instructions.  */
14165
14166static int
14167thumb2_record_asimd_struct_ld_st (arm_insn_decode_record *thumb2_insn_r)
14168{
14169 struct regcache *reg_cache = thumb2_insn_r->regcache;
14170 uint32_t l_bit, a_bit, b_bits;
14171 uint32_t record_buf[128], record_buf_mem[128];
14172 uint32_t reg_rn, reg_vd, address, f_elem;
14173 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
14174 uint8_t f_ebytes;
14175
14176 l_bit = bit (thumb2_insn_r->arm_insn, 21);
14177 a_bit = bit (thumb2_insn_r->arm_insn, 23);
14178 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
14179 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14180 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
14181 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
14182 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
14183 f_elem = 8 / f_ebytes;
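  /* For example, a size field of 0b10 gives 4-byte elements, so each 8-byte
     D register holds F_ELEM = 2 elements.  */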
14184
14185 if (!l_bit)
14186 {
14187 ULONGEST u_regval = 0;
14188 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
14189 address = u_regval;
14190
14191 if (!a_bit)
14192 {
14193 /* Handle VST1. */
14194 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
14195 {
14196 if (b_bits == 0x07)
14197 bf_regs = 1;
14198 else if (b_bits == 0x0a)
14199 bf_regs = 2;
14200 else if (b_bits == 0x06)
14201 bf_regs = 3;
14202 else if (b_bits == 0x02)
14203 bf_regs = 4;
14204 else
14205 bf_regs = 0;
14206
14207 for (index_r = 0; index_r < bf_regs; index_r++)
14208 {
14209 for (index_e = 0; index_e < f_elem; index_e++)
14210 {
14211 record_buf_mem[index_m++] = f_ebytes;
14212 record_buf_mem[index_m++] = address;
14213 address = address + f_ebytes;
14214 thumb2_insn_r->mem_rec_count += 1;
14215 }
14216 }
14217 }
14218 /* Handle VST2. */
14219 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
14220 {
14221 if (b_bits == 0x09 || b_bits == 0x08)
14222 bf_regs = 1;
14223 else if (b_bits == 0x03)
14224 bf_regs = 2;
14225 else
14226 bf_regs = 0;
14227
14228 for (index_r = 0; index_r < bf_regs; index_r++)
14229 for (index_e = 0; index_e < f_elem; index_e++)
14230 {
14231 for (loop_t = 0; loop_t < 2; loop_t++)
14232 {
14233 record_buf_mem[index_m++] = f_ebytes;
14234 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14235 thumb2_insn_r->mem_rec_count += 1;
14236 }
14237 address = address + (2 * f_ebytes);
14238 }
14239 }
14240 /* Handle VST3. */
14241 else if ((b_bits & 0x0e) == 0x04)
14242 {
14243 for (index_e = 0; index_e < f_elem; index_e++)
14244 {
14245 for (loop_t = 0; loop_t < 3; loop_t++)
14246 {
14247 record_buf_mem[index_m++] = f_ebytes;
14248 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14249 thumb2_insn_r->mem_rec_count += 1;
14250 }
14251 address = address + (3 * f_ebytes);
14252 }
14253 }
14254 /* Handle VST4. */
14255 else if (!(b_bits & 0x0e))
14256 {
14257 for (index_e = 0; index_e < f_elem; index_e++)
14258 {
14259 for (loop_t = 0; loop_t < 4; loop_t++)
14260 {
14261 record_buf_mem[index_m++] = f_ebytes;
14262 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14263 thumb2_insn_r->mem_rec_count += 1;
14264 }
14265 address = address + (4 * f_ebytes);
14266 }
14267 }
14268 }
14269 else
14270 {
14271 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
14272
14273 if (bft_size == 0x00)
14274 f_ebytes = 1;
14275 else if (bft_size == 0x01)
14276 f_ebytes = 2;
14277 else if (bft_size == 0x02)
14278 f_ebytes = 4;
14279 else
14280 f_ebytes = 0;
14281
14282 /* Handle VST1. */
14283 if (!(b_bits & 0x0b) || b_bits == 0x08)
14284 thumb2_insn_r->mem_rec_count = 1;
14285 /* Handle VST2. */
14286 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
14287 thumb2_insn_r->mem_rec_count = 2;
14288 /* Handle VST3. */
14289 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
14290 thumb2_insn_r->mem_rec_count = 3;
14291 /* Handle VST4. */
14292 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
14293 thumb2_insn_r->mem_rec_count = 4;
14294
14295 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
14296 {
14297	  record_buf_mem[index_m * 2] = f_ebytes;
14298	  record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
14299 }
14300 }
14301 }
14302 else
14303 {
14304 if (!a_bit)
14305 {
14306 /* Handle VLD1. */
14307 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
14308 thumb2_insn_r->reg_rec_count = 1;
14309 /* Handle VLD2. */
14310 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
14311 thumb2_insn_r->reg_rec_count = 2;
14312 /* Handle VLD3. */
14313 else if ((b_bits & 0x0e) == 0x04)
14314 thumb2_insn_r->reg_rec_count = 3;
14315 /* Handle VLD4. */
14316 else if (!(b_bits & 0x0e))
14317 thumb2_insn_r->reg_rec_count = 4;
14318 }
14319 else
14320 {
14321 /* Handle VLD1. */
14322 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
14323 thumb2_insn_r->reg_rec_count = 1;
14324 /* Handle VLD2. */
14325 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
14326 thumb2_insn_r->reg_rec_count = 2;
14327 /* Handle VLD3. */
14328 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
14329 thumb2_insn_r->reg_rec_count = 3;
14330 /* Handle VLD4. */
14331 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
14332 thumb2_insn_r->reg_rec_count = 4;
14333
14334 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
14335 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
14336 }
14337 }
14338
14339 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
14340 {
14341 record_buf[index_r] = reg_rn;
14342 thumb2_insn_r->reg_rec_count += 1;
14343 }
14344
14345 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14346 record_buf);
14347 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
14348 record_buf_mem);
14349 return 0;
14350}
14351
14352/* Decodes thumb2 instruction type and invokes its record handler. */
14353
14354static unsigned int
14355thumb2_record_decode_insn_handler (arm_insn_decode_record *thumb2_insn_r)
14356{
14357 uint32_t op, op1, op2;
14358
14359 op = bit (thumb2_insn_r->arm_insn, 15);
14360 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
14361 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
14362
14363 if (op1 == 0x01)
14364 {
14365      if (!(op2 & 0x64))
14366 {
14367 /* Load/store multiple instruction. */
14368 return thumb2_record_ld_st_multiple (thumb2_insn_r);
14369 }
14370 else if ((op2 & 0x64) == 0x4)
14371 {
14372 /* Load/store (dual/exclusive) and table branch instruction. */
14373 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
14374 }
14375 else if ((op2 & 0x60) == 0x20)
14376 {
14377 /* Data-processing (shifted register). */
14378 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
14379 }
14380 else if (op2 & 0x40)
14381 {
14382 /* Co-processor instructions. */
14383 return thumb2_record_coproc_insn (thumb2_insn_r);
14384 }
14385 }
14386 else if (op1 == 0x02)
14387 {
14388 if (op)
14389 {
14390 /* Branches and miscellaneous control instructions. */
14391 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
14392 }
14393 else if (op2 & 0x20)
14394 {
14395 /* Data-processing (plain binary immediate) instruction. */
14396 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14397 }
14398 else
14399 {
14400 /* Data-processing (modified immediate). */
14401 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
14402 }
14403 }
14404 else if (op1 == 0x03)
14405 {
14406      if (!(op2 & 0x71))
14407 {
14408 /* Store single data item. */
14409 return thumb2_record_str_single_data (thumb2_insn_r);
14410 }
14411 else if (!((op2 & 0x71) ^ 0x10))
14412 {
14413 /* Advanced SIMD or structure load/store instructions. */
14414 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
14415 }
14416 else if (!((op2 & 0x67) ^ 0x01))
14417 {
14418 /* Load byte, memory hints instruction. */
14419 return thumb2_record_ld_mem_hints (thumb2_insn_r);
14420 }
14421 else if (!((op2 & 0x67) ^ 0x03))
14422 {
14423 /* Load halfword, memory hints instruction. */
14424 return thumb2_record_ld_mem_hints (thumb2_insn_r);
14425 }
14426 else if (!((op2 & 0x67) ^ 0x05))
14427 {
14428 /* Load word instruction. */
14429 return thumb2_record_ld_word (thumb2_insn_r);
14430 }
14431 else if (!((op2 & 0x70) ^ 0x20))
14432 {
14433 /* Data-processing (register) instruction. */
14434 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14435 }
14436 else if (!((op2 & 0x78) ^ 0x30))
14437 {
14438 /* Multiply, multiply accumulate, abs diff instruction. */
14439 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14440 }
14441 else if (!((op2 & 0x78) ^ 0x38))
14442 {
14443 /* Long multiply, long multiply accumulate, and divide. */
14444 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
14445 }
14446 else if (op2 & 0x40)
14447 {
14448 /* Co-processor instructions. */
14449 return thumb2_record_coproc_insn (thumb2_insn_r);
14450 }
14451 }
14452
14453 return -1;
14454}
14455
14456namespace {
14457/* Abstract instruction reader. */
14458
14459class abstract_instruction_reader
14460{
14461public:
14462 /* Read one instruction of size LEN from address MEMADDR, using
14463 BYTE_ORDER endianness. */
14464
14465 virtual ULONGEST read (CORE_ADDR memaddr, const size_t len,
14466 enum bfd_endian byte_order) = 0;
14467};
14468
14469/* Instruction reader from real target. */
14470
14471class instruction_reader : public abstract_instruction_reader
14472{
14473 public:
14474 ULONGEST read (CORE_ADDR memaddr, const size_t len,
14475 enum bfd_endian byte_order) override
14476 {
14477 return read_code_unsigned_integer (memaddr, len, byte_order);
14478 }
14479};
14480
14481} // namespace
14482
14483typedef int (*sti_arm_hdl_fp_t) (arm_insn_decode_record*);
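/* Each handler takes the decode record and returns ARM_RECORD_SUCCESS on
   success; any other value makes decode_insn treat the instruction as
   unsupported.  */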
14484
14485/* Decode an arm/thumb insn depending on its condition codes and opcodes, and
14486 dispatch it to the matching record handler. */
14487
14488static int
14489decode_insn (abstract_instruction_reader &reader,
14490 arm_insn_decode_record *arm_record,
14491 record_type_t record_type, uint32_t insn_size)
14492{
14493
14494 /* (Starting from numerical 0), bits 25, 26 and 27 decode the type of arm
14495 instruction. */
14496 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
14497 {
14498 arm_record_data_proc_misc_ld_str, /* 000. */
14499 arm_record_data_proc_imm, /* 001. */
14500 arm_record_ld_st_imm_offset, /* 010. */
14501 arm_record_ld_st_reg_offset, /* 011. */
14502 arm_record_ld_st_multiple, /* 100. */
14503 arm_record_b_bl, /* 101. */
14504 arm_record_asimd_vfp_coproc, /* 110. */
14505 arm_record_coproc_data_proc /* 111. */
14506 };
14507
14508 /* (Starting from numerical 0), bits 13, 14 and 15 decode the type of thumb
14509 instruction. */
14510 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
14511 {
14512 thumb_record_shift_add_sub, /* 000. */
14513 thumb_record_add_sub_cmp_mov, /* 001. */
14514 thumb_record_ld_st_reg_offset, /* 010. */
14515 thumb_record_ld_st_imm_offset, /* 011. */
14516 thumb_record_ld_st_stack, /* 100. */
14517 thumb_record_misc, /* 101. */
14518 thumb_record_ldm_stm_swi, /* 110. */
14519 thumb_record_branch /* 111. */
14520 };
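  /* For example, the arm instruction 0xe1a05000 (mov r5, r0) has bits
     25-27 equal to 0, so it is dispatched to
     arm_record_data_proc_misc_ld_str, while the 16-bit thumb instruction
     0xb2db (uxtb r3, r3) has bits 13-15 equal to 0b101 and is dispatched
     to thumb_record_misc; both encodings appear in the selftests below.  */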
14521
14522 uint32_t ret = 0; /* Return value: negative means failure, 0 means success. */
14523 uint32_t insn_id = 0;
14524 enum bfd_endian code_endian
14525 = gdbarch_byte_order_for_code (arm_record->gdbarch);
14526 arm_record->arm_insn
14527 = reader.read (arm_record->this_addr, insn_size, code_endian);
14528
14529 if (ARM_RECORD == record_type)
14530 {
14531 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
14532 insn_id = bits (arm_record->arm_insn, 25, 27);
14533
14534 if (arm_record->cond == 0xf)
14535 ret = arm_record_extension_space (arm_record);
14536 else
14537 {
14538 /* The insn did not fall into the extension space, so decode it
14539 with the ordinary handlers. */
14540 ret = arm_handle_insn[insn_id] (arm_record);
14541 }
14542 if (ret != ARM_RECORD_SUCCESS)
14543 {
14544 arm_record_unsupported_insn (arm_record);
14545 ret = -1;
14546 }
14547 }
14548 else if (THUMB_RECORD == record_type)
14549 {
14550 /* Thumb does not have condition codes, so set cond to a negative value. */
14551 arm_record->cond = -1;
14552 insn_id = bits (arm_record->arm_insn, 13, 15);
14553 ret = thumb_handle_insn[insn_id] (arm_record);
14554 if (ret != ARM_RECORD_SUCCESS)
14555 {
14556 arm_record_unsupported_insn (arm_record);
14557 ret = -1;
14558 }
14559 }
14560 else if (THUMB2_RECORD == record_type)
14561 {
14562 /* Thumb does not have condition codes, so set cond to a negative value. */
14563 arm_record->cond = -1;
14564
14565 /* Swap the first halfword of the 32-bit thumb instruction with the second. */
14566 arm_record->arm_insn
14567 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
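      /* In the selftest below, for instance, the raw value 0x7f70ee1d
	 (mrc 15, 0, r7, cr13, cr0, {3}) becomes 0xee1d7f70 after the swap,
	 i.e. the first halfword 0xee1d ends up in the upper 16 bits as the
	 thumb2 decoder expects.  */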
14568
14569 ret = thumb2_record_decode_insn_handler (arm_record);
14570
14571 if (ret != ARM_RECORD_SUCCESS)
14572 {
14573 arm_record_unsupported_insn (arm_record);
14574 ret = -1;
14575 }
14576 }
14577 else
14578 {
14579 /* Throw assertion. */
14580 gdb_assert_not_reached ("not a valid instruction, could not decode");
14581 }
14582
14583 return ret;
14584}
14585
14586#if GDB_SELF_TEST
14587namespace selftests {
14588
14589/* Instruction reader class for selftests.
14590
14591 For 16-bit Thumb instructions, an array of uint16_t should be used.
14592
14593 For 32-bit Thumb instructions and regular 32-bit Arm instructions, an array
14594 of uint32_t should be used. */
14595
14596template<typename T>
14597class instruction_reader_selftest : public abstract_instruction_reader
14598{
14599public:
14600 template<size_t SIZE>
14601 instruction_reader_selftest (const T (&insns)[SIZE])
14602 : m_insns (insns), m_insns_size (SIZE)
14603 {}
14604
14605 ULONGEST read (CORE_ADDR memaddr, const size_t length,
14606 enum bfd_endian byte_order) override
14607 {
14608 SELF_CHECK (length == sizeof (T));
14609 SELF_CHECK (memaddr % sizeof (T) == 0);
14610 SELF_CHECK ((memaddr / sizeof (T)) < m_insns_size);
14611
14612 return m_insns[memaddr / sizeof (T)];
14613 }
14614
14615private:
14616 const T *m_insns;
14617 const size_t m_insns_size;
14618};
14619
14620static void
14621arm_record_test (void)
14622{
14623 struct gdbarch_info info;
14624 info.bfd_arch_info = bfd_scan_arch ("arm");
14625
14626 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14627
14628 SELF_CHECK (gdbarch != NULL);
14629
14630 /* 16-bit Thumb instructions. */
14631 {
14632 arm_insn_decode_record arm_record;
14633
14634 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14635 arm_record.gdbarch = gdbarch;
14636
14637 /* Use the endian-free representation of the instructions here. The test
14638 will handle endianness conversions. */
14639 static const uint16_t insns[] = {
14640 /* db b2 uxtb r3, r3 */
14641 0xb2db,
14642 /* cd 58 ldr r5, [r1, r3] */
14643 0x58cd,
14644 };
14645
14646 instruction_reader_selftest<uint16_t> reader (insns);
14647 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14648 THUMB_INSN_SIZE_BYTES);
14649
14650 SELF_CHECK (ret == 0);
14651 SELF_CHECK (arm_record.mem_rec_count == 0);
14652 SELF_CHECK (arm_record.reg_rec_count == 1);
14653 SELF_CHECK (arm_record.arm_regs[0] == 3);
14654
14655 arm_record.this_addr += 2;
14656 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14657 THUMB_INSN_SIZE_BYTES);
14658
14659 SELF_CHECK (ret == 0);
14660 SELF_CHECK (arm_record.mem_rec_count == 0);
14661 SELF_CHECK (arm_record.reg_rec_count == 1);
14662 SELF_CHECK (arm_record.arm_regs[0] == 5);
14663 }
14664
14665 /* 32-bit Thumb-2 instructions. */
14666 {
14667 arm_insn_decode_record arm_record;
14668
14669 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14670 arm_record.gdbarch = gdbarch;
14671
14672 /* Use the endian-free representation of the instruction here. The test
14673 will handle endianness conversions. */
14674 static const uint32_t insns[] = {
14675 /* mrc 15, 0, r7, cr13, cr0, {3} */
14676 0x7f70ee1d,
14677 };
14678
14679 instruction_reader_selftest<uint32_t> reader (insns);
14680 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14681 THUMB2_INSN_SIZE_BYTES);
14682
14683 SELF_CHECK (ret == 0);
14684 SELF_CHECK (arm_record.mem_rec_count == 0);
14685 SELF_CHECK (arm_record.reg_rec_count == 1);
14686 SELF_CHECK (arm_record.arm_regs[0] == 7);
14687 }
14688
14689 /* 32-bit instructions. */
14690 {
14691 arm_insn_decode_record arm_record;
14692
14693 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14694 arm_record.gdbarch = gdbarch;
14695
14696 /* Use the endian-free representation of the instruction here. The test
14697 will handle endianness conversions. */
14698 static const uint32_t insns[] = {
14699 /* mov r5, r0 */
14700 0xe1a05000,
14701 };
14702
14703 instruction_reader_selftest<uint32_t> reader (insns);
14704 int ret = decode_insn (reader, &arm_record, ARM_RECORD,
14705 ARM_INSN_SIZE_BYTES);
14706
14707 SELF_CHECK (ret == 0);
14708 }
14709}
14710
14711/* Instruction reader from manually cooked instruction sequences. */
14712
14713class test_arm_instruction_reader : public arm_instruction_reader
14714{
14715public:
14716 explicit test_arm_instruction_reader (gdb::array_view<const uint32_t> insns)
14717 : m_insns (insns)
14718 {}
14719
14720 uint32_t read (CORE_ADDR memaddr, enum bfd_endian byte_order) const override
14721 {
14722 SELF_CHECK (memaddr % 4 == 0);
14723 SELF_CHECK (memaddr / 4 < m_insns.size ());
14724
14725 return m_insns[memaddr / 4];
14726 }
14727
14728private:
14729 const gdb::array_view<const uint32_t> m_insns;
14730};
14731
14732static void
14733arm_analyze_prologue_test ()
14734{
14735 for (bfd_endian endianness : {BFD_ENDIAN_LITTLE, BFD_ENDIAN_BIG})
14736 {
14737 struct gdbarch_info info;
14738 info.byte_order = endianness;
14739 info.byte_order_for_code = endianness;
14740 info.bfd_arch_info = bfd_scan_arch ("arm");
14741
14742 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14743
14744 SELF_CHECK (gdbarch != NULL);
14745
14746 /* The "sub" instruction contains an immediate value rotate count of 0,
14747 which resulted in a 32-bit shift of a 32-bit value, caught by
14748 UBSan. */
14749 const uint32_t insns[] = {
14750 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */
14751 0xe1a05000, /* mov r5, r0 */
14752 0xe5903020, /* ldr r3, [r0, #32] */
14753 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */
14754 };
14755
14756 test_arm_instruction_reader mem_reader (insns);
14757 arm_prologue_cache cache;
14758 arm_cache_init (&cache, gdbarch);
14759
14760 arm_analyze_prologue (gdbarch, 0, sizeof (insns) - 1, &cache, mem_reader);
14761 }
14762}
14763
14764} // namespace selftests
14765#endif /* GDB_SELF_TEST */
14766
14767/* Cleans up local record registers and memory allocations. */
14768
14769static void
14770deallocate_reg_mem (arm_insn_decode_record *record)
14771{
14772 xfree (record->arm_regs);
14773 xfree (record->arm_mems);
14774}
14775
14776
14777/* Parse the current instruction and record the values of the registers and
14778 memory that will be changed by the current instruction to "record_arch_list".
14779 Return -1 if something is wrong. */
14780
14781int
14782arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
14783 CORE_ADDR insn_addr)
14784{
14785
14786 uint32_t no_of_rec = 0;
14787 uint32_t ret = 0; /* Return value: -1 means record failure, 0 means success. */
14788 ULONGEST t_bit = 0, insn_id = 0;
14789
14790 ULONGEST u_regval = 0;
14791
14792 arm_insn_decode_record arm_record;
14793
14794 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14795 arm_record.regcache = regcache;
14796 arm_record.this_addr = insn_addr;
14797 arm_record.gdbarch = gdbarch;
14798
14799
14800 if (record_debug > 1)
14801 {
14802 gdb_printf (gdb_stdlog, "Process record: arm_process_record "
14803 "addr = %s\n",
14804 paddress (gdbarch, arm_record.this_addr));
14805 }
14806
14807 instruction_reader reader;
14808 enum bfd_endian code_endian
14809 = gdbarch_byte_order_for_code (arm_record.gdbarch);
14810 arm_record.arm_insn
14811 = reader.read (arm_record.this_addr, 2, code_endian);
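  /* This halfword is only used to classify the encoding below; decode_insn
     re-reads the instruction with the correct size.  For thumb, a first
     halfword whose bits 15:11 are 0b11101, 0b11110 or 0b11111 starts a
     32-bit (thumb2) instruction.  */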
14812
14813 /* Check whether the insn is a thumb or an arm one. */
14814
14815 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
14816 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
14817
14818
14819 if (!(u_regval & t_bit))
14820 {
14821 /* We are decoding an arm insn. */
14822 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
14823 }
14824 else
14825 {
14826 insn_id = bits (arm_record.arm_insn, 11, 15);
14827 /* Is it a thumb2 insn? */
14828 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
14829 {
14830 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14831 THUMB2_INSN_SIZE_BYTES);
14832 }
14833 else
14834 {
14835 /* We are decoding a thumb insn. */
14836 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14837 THUMB_INSN_SIZE_BYTES);
14838 }
14839 }
14840
14841 if (0 == ret)
14842 {
14843 /* Record registers. */
14844 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
14845 if (arm_record.arm_regs)
14846 {
14847 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
14848 {
14849 if (record_full_arch_list_add_reg
14850 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
14851 ret = -1;
14852 }
14853 }
14854 /* Record memories. */
14855 if (arm_record.arm_mems)
14856 {
14857 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
14858 {
14859 if (record_full_arch_list_add_mem
14860 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
14861 arm_record.arm_mems[no_of_rec].len))
14862 ret = -1;
14863 }
14864 }
14865
14866 if (record_full_arch_list_add_end ())
14867 ret = -1;
14868 }
14869
14870
14871 deallocate_reg_mem (&arm_record);
14872
14873 return ret;
14874}
14875
14876/* See arm-tdep.h. */
14877
14878const target_desc *
14879arm_read_description (arm_fp_type fp_type, bool tls)
14880{
14881 struct target_desc *tdesc = tdesc_arm_list[fp_type][tls];
14882
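  /* The description for this fp_type/tls combination is created lazily on
     first use and cached, so later calls return the same target_desc.  */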
14883 if (tdesc == nullptr)
14884 {
14885 tdesc = arm_create_target_description (fp_type, tls);
14886 tdesc_arm_list[fp_type][tls] = tdesc;
14887 }
14888
14889 return tdesc;
14890}
14891
14892/* See arm-tdep.h. */
14893
14894const target_desc *
14895arm_read_mprofile_description (arm_m_profile_type m_type)
14896{
14897 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
14898
14899 if (tdesc == nullptr)
14900 {
14901 tdesc = arm_create_mprofile_target_description (m_type);
14902 tdesc_arm_mprofile_list[m_type] = tdesc;
14903 }
14904
14905 return tdesc;
14906}