]> git.ipfire.org Git - thirdparty/binutils-gdb.git/blob - gdb/arm-tdep.c
Replace most calls to help_list and cmd_show_list
[thirdparty/binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2020 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2/frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48 #include "count-one-bits.h"
49
50 #include "arch/arm.h"
51 #include "arch/arm-get-next-pcs.h"
52 #include "arm-tdep.h"
53 #include "gdb/sim-arm.h"
54
55 #include "elf-bfd.h"
56 #include "coff/internal.h"
57 #include "elf/arm.h"
58
59 #include "record.h"
60 #include "record-full.h"
61 #include <algorithm>
62
63 #if GDB_SELF_TEST
64 #include "gdbsupport/selftest.h"
65 #endif
66
67 static bool arm_debug;
68
69 /* Macros for setting and testing a bit in a minimal symbol that marks
70 it as Thumb function. The MSB of the minimal symbol's "info" field
71 is used for this purpose.
72
73 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
74 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
75
76 #define MSYMBOL_SET_SPECIAL(msym) \
77 MSYMBOL_TARGET_FLAG_1 (msym) = 1
78
79 #define MSYMBOL_IS_SPECIAL(msym) \
80 MSYMBOL_TARGET_FLAG_1 (msym)
81
/* One ELF mapping symbol, recording that the bytes starting at VALUE
   (a section-relative offset) are of the kind indicated by TYPE.  Per
   the ARM ELF convention, 'a' marks ARM code, 't' Thumb code and 'd'
   data; arm_pc_is_thumb tests for type == 't'.  */

struct arm_mapping_symbol
{
  /* Section-relative address at which this mapping takes effect.  */
  CORE_ADDR value;
  /* Mapping kind ('a', 'd' or 't').  */
  char type;

  /* Order mapping symbols by address, so that a per-section vector of
     them can be sorted and binary-searched (std::lower_bound).  */
  bool operator< (const arm_mapping_symbol &other) const
  { return this->value < other.value; }
};

typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
92
/* Per-BFD cache of the mapping symbols found in an objfile; consulted
   by arm_find_mapping_symbol to classify addresses as ARM code, Thumb
   code or data.  */

struct arm_per_bfd
{
  /* NUM_SECTIONS is the number of sections in the BFD; one (initially
     empty and unsorted) vector of mapping symbols is allocated per
     section, indexed by BFD section index.  */
  explicit arm_per_bfd (size_t num_sections)
  : section_maps (new arm_mapping_symbol_vec[num_sections]),
    section_maps_sorted (new bool[num_sections] ())
  {}

  DISABLE_COPY_AND_ASSIGN (arm_per_bfd);

  /* Information about mapping symbols ($a, $d, $t) in the objfile.

     The format is an array of vectors of arm_mapping_symbols, there is one
     vector for each section of the objfile (the array is index by BFD section
     index).

     For each section, the vector of arm_mapping_symbol is sorted by
     symbol value (address).  */
  std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;

  /* For each corresponding element of section_maps above, is this vector
     sorted.  Sorting is deferred until first lookup; see
     arm_find_mapping_symbol.  */
  std::unique_ptr<bool[]> section_maps_sorted;
};

/* Per-bfd data used for mapping symbols.  */
static bfd_key<arm_per_bfd> arm_bfd_data_key;
119
120 /* The list of available "set arm ..." and "show arm ..." commands. */
121 static struct cmd_list_element *setarmcmdlist = NULL;
122 static struct cmd_list_element *showarmcmdlist = NULL;
123
124 /* The type of floating-point to use. Keep this in sync with enum
125 arm_float_model, and the help string in _initialize_arm_tdep. */
126 static const char *const fp_model_strings[] =
127 {
128 "auto",
129 "softfpa",
130 "fpa",
131 "softvfp",
132 "vfp",
133 NULL
134 };
135
136 /* A variable that can be configured by the user. */
137 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
138 static const char *current_fp_model = "auto";
139
140 /* The ABI to use. Keep this in sync with arm_abi_kind. */
141 static const char *const arm_abi_strings[] =
142 {
143 "auto",
144 "APCS",
145 "AAPCS",
146 NULL
147 };
148
149 /* A variable that can be configured by the user. */
150 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
151 static const char *arm_abi_string = "auto";
152
153 /* The execution mode to assume. */
154 static const char *const arm_mode_strings[] =
155 {
156 "auto",
157 "arm",
158 "thumb",
159 NULL
160 };
161
162 static const char *arm_fallback_mode_string = "auto";
163 static const char *arm_force_mode_string = "auto";
164
165 /* The standard register names, and all the valid aliases for them. Note
166 that `fp', `sp' and `pc' are not added in this alias list, because they
167 have been added as builtin user registers in
168 std-regs.c:_initialize_frame_reg. */
169 static const struct
170 {
171 const char *name;
172 int regnum;
173 } arm_register_aliases[] = {
174 /* Basic register numbers. */
175 { "r0", 0 },
176 { "r1", 1 },
177 { "r2", 2 },
178 { "r3", 3 },
179 { "r4", 4 },
180 { "r5", 5 },
181 { "r6", 6 },
182 { "r7", 7 },
183 { "r8", 8 },
184 { "r9", 9 },
185 { "r10", 10 },
186 { "r11", 11 },
187 { "r12", 12 },
188 { "r13", 13 },
189 { "r14", 14 },
190 { "r15", 15 },
191 /* Synonyms (argument and variable registers). */
192 { "a1", 0 },
193 { "a2", 1 },
194 { "a3", 2 },
195 { "a4", 3 },
196 { "v1", 4 },
197 { "v2", 5 },
198 { "v3", 6 },
199 { "v4", 7 },
200 { "v5", 8 },
201 { "v6", 9 },
202 { "v7", 10 },
203 { "v8", 11 },
204 /* Other platform-specific names for r9. */
205 { "sb", 9 },
206 { "tr", 9 },
207 /* Special names. */
208 { "ip", 12 },
209 { "lr", 14 },
210 /* Names used by GCC (not listed in the ARM EABI). */
211 { "sl", 10 },
212 /* A special name from the older ATPCS. */
213 { "wr", 7 },
214 };
215
216 static const char *const arm_register_names[] =
217 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
218 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
219 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
220 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
221 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
222 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
223 "fps", "cpsr" }; /* 24 25 */
224
225 /* Holds the current set of options to be passed to the disassembler. */
226 static char *arm_disassembler_options;
227
228 /* Valid register name styles. */
229 static const char **valid_disassembly_styles;
230
231 /* Disassembly style to use. Default to "std" register names. */
232 static const char *disassembly_style;
233
234 /* All possible arm target descriptors. */
235 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
236 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
237
238 /* This is used to keep the bfd arch_info in sync with the disassembly
239 style. */
240 static void set_disassembly_style_sfunc (const char *, int,
241 struct cmd_list_element *);
242 static void show_disassembly_style_sfunc (struct ui_file *, int,
243 struct cmd_list_element *,
244 const char *);
245
246 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
247 readable_regcache *regcache,
248 int regnum, gdb_byte *buf);
249 static void arm_neon_quad_write (struct gdbarch *gdbarch,
250 struct regcache *regcache,
251 int regnum, const gdb_byte *buf);
252
253 static CORE_ADDR
254 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
255
256
257 /* get_next_pcs operations. */
258 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
259 arm_get_next_pcs_read_memory_unsigned_integer,
260 arm_get_next_pcs_syscall_next_pc,
261 arm_get_next_pcs_addr_bits_remove,
262 arm_get_next_pcs_is_thumb,
263 NULL,
264 };
265
/* Cache of what a frame's prologue analysis discovered, filled in by
   arm_analyze_prologue / thumb_analyze_prologue and used by the
   prologue-based unwinders.  */

struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.
     (E.g. ARM_FP_REGNUM or THUMB_FP_REGNUM, depending on the mode
     the prologue was scanned in.)  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};
285
286 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
287 CORE_ADDR prologue_start,
288 CORE_ADDR prologue_end,
289 struct arm_prologue_cache *cache);
290
291 /* Architecture version for displaced stepping. This effects the behaviour of
292 certain instructions, and really should not be hard-wired. */
293
294 #define DISPLACED_STEPPING_ARCH_VERSION 5
295
296 /* See arm-tdep.h. */
297
298 bool arm_apcs_32 = true;
299
300 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
301
302 int
303 arm_psr_thumb_bit (struct gdbarch *gdbarch)
304 {
305 if (gdbarch_tdep (gdbarch)->is_m)
306 return XPSR_T;
307 else
308 return CPSR_T;
309 }
310
311 /* Determine if the processor is currently executing in Thumb mode. */
312
313 int
314 arm_is_thumb (struct regcache *regcache)
315 {
316 ULONGEST cpsr;
317 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
318
319 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
320
321 return (cpsr & t_bit) != 0;
322 }
323
324 /* Determine if FRAME is executing in Thumb mode. */
325
326 int
327 arm_frame_is_thumb (struct frame_info *frame)
328 {
329 CORE_ADDR cpsr;
330 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
331
332 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
333 directly (from a signal frame or dummy frame) or by interpreting
334 the saved LR (from a prologue or DWARF frame). So consult it and
335 trust the unwinders. */
336 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
337
338 return (cpsr & t_bit) != 0;
339 }
340
/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type ('a', 'd' or 't').  Otherwise, return 0.  If START
   is non-NULL, set *START to the location of the mapping symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      /* Mapping symbols, if any, were collected per-BFD; no data means
	 this objfile has none.  */
      arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
      if (data != NULL)
	{
	  unsigned int section_idx = sec->the_bfd_section->index;
	  arm_mapping_symbol_vec &map
	    = data->section_maps[section_idx];

	  /* Sort the vector on first use.  Sorting is deferred to
	     lookup time so that sections never queried are never
	     sorted.  */
	  if (!data->section_maps_sorted[section_idx])
	    {
	      std::sort (map.begin (), map.end ());
	      data->section_maps_sorted[section_idx] = true;
	    }

	  /* The vector stores section-relative offsets, so convert
	     MEMADDR before searching.  The type field of the key is
	     irrelevant to the ordering (operator< compares values).  */
	  struct arm_mapping_symbol map_key
	    = { memaddr - obj_section_addr (sec), 0 };
	  arm_mapping_symbol_vec::const_iterator it
	    = std::lower_bound (map.begin (), map.end (), map_key);

	  /* std::lower_bound finds the earliest ordered insertion
	     point.  If the symbol at this position starts at this exact
	     address, we use that; otherwise, the preceding
	     mapping symbol covers this address.  */
	  if (it < map.end ())
	    {
	      if (it->value == map_key.value)
		{
		  if (start)
		    *start = it->value + obj_section_addr (sec);
		  return it->type;
		}
	    }

	  /* Fall back to the mapping symbol immediately before the
	     insertion point, if any; it covers MEMADDR.  */
	  if (it > map.begin ())
	    {
	      arm_mapping_symbol_vec::const_iterator prev_it
		= it - 1;

	      if (start)
		*start = prev_it->value + obj_section_addr (sec);
	      return prev_it->type;
	    }
	}
    }

  /* No section, no data, or MEMADDR precedes every mapping symbol.  */
  return 0;
}
401
402 /* Determine if the program counter specified in MEMADDR is in a Thumb
403 function. This function should be called for addresses unrelated to
404 any executing frame; otherwise, prefer arm_frame_is_thumb. */
405
406 int
407 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
408 {
409 struct bound_minimal_symbol sym;
410 char type;
411 arm_displaced_step_closure *dsc
412 = ((arm_displaced_step_closure * )
413 get_displaced_step_closure_by_addr (memaddr));
414
415 /* If checking the mode of displaced instruction in copy area, the mode
416 should be determined by instruction on the original address. */
417 if (dsc)
418 {
419 if (debug_displaced)
420 fprintf_unfiltered (gdb_stdlog,
421 "displaced: check mode of %.8lx instead of %.8lx\n",
422 (unsigned long) dsc->insn_addr,
423 (unsigned long) memaddr);
424 memaddr = dsc->insn_addr;
425 }
426
427 /* If bit 0 of the address is set, assume this is a Thumb address. */
428 if (IS_THUMB_ADDR (memaddr))
429 return 1;
430
431 /* If the user wants to override the symbol table, let him. */
432 if (strcmp (arm_force_mode_string, "arm") == 0)
433 return 0;
434 if (strcmp (arm_force_mode_string, "thumb") == 0)
435 return 1;
436
437 /* ARM v6-M and v7-M are always in Thumb mode. */
438 if (gdbarch_tdep (gdbarch)->is_m)
439 return 1;
440
441 /* If there are mapping symbols, consult them. */
442 type = arm_find_mapping_symbol (memaddr, NULL);
443 if (type)
444 return type == 't';
445
446 /* Thumb functions have a "special" bit set in minimal symbols. */
447 sym = lookup_minimal_symbol_by_pc (memaddr);
448 if (sym.minsym)
449 return (MSYMBOL_IS_SPECIAL (sym.minsym));
450
451 /* If the user wants to override the fallback mode, let them. */
452 if (strcmp (arm_fallback_mode_string, "arm") == 0)
453 return 0;
454 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
455 return 1;
456
457 /* If we couldn't find any symbol, but we're talking to a running
458 target, then trust the current value of $cpsr. This lets
459 "display/i $pc" always show the correct mode (though if there is
460 a symbol table we will not reach here, so it still may not be
461 displayed in the mode it will be executed). */
462 if (target_has_registers)
463 return arm_frame_is_thumb (get_current_frame ());
464
465 /* Otherwise we're out of luck; we assume ARM. */
466 return 0;
467 }
468
469 /* Determine if the address specified equals any of these magic return
470 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
471 architectures.
472
473 From ARMv6-M Reference Manual B1.5.8
474 Table B1-5 Exception return behavior
475
476 EXC_RETURN Return To Return Stack
477 0xFFFFFFF1 Handler mode Main
478 0xFFFFFFF9 Thread mode Main
479 0xFFFFFFFD Thread mode Process
480
481 From ARMv7-M Reference Manual B1.5.8
482 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
483
484 EXC_RETURN Return To Return Stack
485 0xFFFFFFF1 Handler mode Main
486 0xFFFFFFF9 Thread mode Main
487 0xFFFFFFFD Thread mode Process
488
489 Table B1-9 EXC_RETURN definition of exception return behavior, with
490 FP
491
492 EXC_RETURN Return To Return Stack Frame Type
493 0xFFFFFFE1 Handler mode Main Extended
494 0xFFFFFFE9 Thread mode Main Extended
495 0xFFFFFFED Thread mode Process Extended
496 0xFFFFFFF1 Handler mode Main Basic
497 0xFFFFFFF9 Thread mode Main Basic
498 0xFFFFFFFD Thread mode Process Basic
499
500 For more details see "B1.5.8 Exception return behavior"
501 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
502
503 static int
504 arm_m_addr_is_magic (CORE_ADDR addr)
505 {
506 switch (addr)
507 {
508 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
509 the exception return behavior. */
510 case 0xffffffe1:
511 case 0xffffffe9:
512 case 0xffffffed:
513 case 0xfffffff1:
514 case 0xfffffff9:
515 case 0xfffffffd:
516 /* Address is magic. */
517 return 1;
518
519 default:
520 /* Address is not magic. */
521 return 0;
522 }
523 }
524
525 /* Remove useless bits from addresses in a running program. */
526 static CORE_ADDR
527 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
528 {
529 /* On M-profile devices, do not strip the low bit from EXC_RETURN
530 (the magic exception return address). */
531 if (gdbarch_tdep (gdbarch)->is_m
532 && arm_m_addr_is_magic (val))
533 return val;
534
535 if (arm_apcs_32)
536 return UNMAKE_THUMB_ADDR (val);
537 else
538 return (val & 0x03fffffc);
539 }
540
541 /* Return 1 if PC is the start of a compiler helper function which
542 can be safely ignored during prologue skipping. IS_THUMB is true
543 if the function is known to be a Thumb function due to the way it
544 is being called. */
545 static int
546 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
547 {
548 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
549 struct bound_minimal_symbol msym;
550
551 msym = lookup_minimal_symbol_by_pc (pc);
552 if (msym.minsym != NULL
553 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
554 && msym.minsym->linkage_name () != NULL)
555 {
556 const char *name = msym.minsym->linkage_name ();
557
558 /* The GNU linker's Thumb call stub to foo is named
559 __foo_from_thumb. */
560 if (strstr (name, "_from_thumb") != NULL)
561 name += 2;
562
563 /* On soft-float targets, __truncdfsf2 is called to convert promoted
564 arguments to their argument types in non-prototyped
565 functions. */
566 if (startswith (name, "__truncdfsf2"))
567 return 1;
568 if (startswith (name, "__aeabi_d2f"))
569 return 1;
570
571 /* Internal functions related to thread-local storage. */
572 if (startswith (name, "__tls_get_addr"))
573 return 1;
574 if (startswith (name, "__aeabi_read_tp"))
575 return 1;
576 }
577 else
578 {
579 /* If we run against a stripped glibc, we may be unable to identify
580 special functions by name. Check for one important case,
581 __aeabi_read_tp, by comparing the *code* against the default
582 implementation (this is hand-written ARM assembler in glibc). */
583
584 if (!is_thumb
585 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
586 == 0xe3e00a0f /* mov r0, #0xffff0fff */
587 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
588 == 0xe240f01f) /* sub pc, r0, #31 */
589 return 1;
590 }
591
592 return 0;
593 }
594
595 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
596 the first 16-bit of instruction, and INSN2 is the second 16-bit of
597 instruction. */
598 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
599 ((bits ((insn1), 0, 3) << 12) \
600 | (bits ((insn1), 10, 10) << 11) \
601 | (bits ((insn2), 12, 14) << 8) \
602 | bits ((insn2), 0, 7))
603
604 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
605 the 32-bit instruction. */
606 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
607 ((bits ((insn), 16, 19) << 12) \
608 | bits ((insn), 0, 11))
609
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.
   IMM is the 12-bit modified immediate field i:imm3:imm8; the result
   is the expanded 32-bit value.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int rotation = imm >> 7;	/* imm12<11:7>.  */
  unsigned int byte = imm & 0xff;	/* imm12<7:0>.  */

  if (rotation >= 8)
    /* Rotated-constant form: an 8-bit value with the top bit forced
       on, rotated right by ROTATION.  Since the value fits in 8 bits
       and ROTATION >= 8, only the left-shift half of the rotation can
       contribute bits.  */
    return (0x80 | (imm & 0x7f)) << (32 - rotation);

  /* Replicated-byte forms, selected by imm12<9:8>.  */
  switch (rotation / 2)
    {
    case 0:			/* 00000000 00000000 00000000 abcdefgh  */
      return byte;
    case 1:			/* 00000000 abcdefgh 00000000 abcdefgh  */
      return byte | (byte << 16);
    case 2:			/* abcdefgh 00000000 abcdefgh 00000000  */
      return (byte << 8) | (byte << 24);
    default:			/* abcdefgh abcdefgh abcdefgh abcdefgh  */
      return byte | (byte << 8) | (byte << 16) | (byte << 24);
    }
}
633
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  if (insn == 0x46bd)			/* mov sp, r7 */
    return 1;
  if ((insn & 0xff80) == 0xb000)	/* add sp, imm */
    return 1;
  if ((insn & 0xfe00) == 0xbc00)	/* pop <registers> */
    return 1;

  return 0;
}
644
645 /* Analyze a Thumb prologue, looking for a recognizable stack frame
646 and frame pointer. Scan until we encounter a store that could
647 clobber the stack frame unexpectedly, or an unknown instruction.
648 Return the last address which is definitely safe to skip for an
649 initial breakpoint. */
650
651 static CORE_ADDR
652 thumb_analyze_prologue (struct gdbarch *gdbarch,
653 CORE_ADDR start, CORE_ADDR limit,
654 struct arm_prologue_cache *cache)
655 {
656 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
657 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
658 int i;
659 pv_t regs[16];
660 CORE_ADDR offset;
661 CORE_ADDR unrecognized_pc = 0;
662
663 for (i = 0; i < 16; i++)
664 regs[i] = pv_register (i, 0);
665 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
666
667 while (start < limit)
668 {
669 unsigned short insn;
670
671 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
672
673 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
674 {
675 int regno;
676 int mask;
677
678 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
679 break;
680
681 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
682 whether to save LR (R14). */
683 mask = (insn & 0xff) | ((insn & 0x100) << 6);
684
685 /* Calculate offsets of saved R0-R7 and LR. */
686 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
687 if (mask & (1 << regno))
688 {
689 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
690 -4);
691 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
692 }
693 }
694 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
695 {
696 offset = (insn & 0x7f) << 2; /* get scaled offset */
697 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
698 -offset);
699 }
700 else if (thumb_instruction_restores_sp (insn))
701 {
702 /* Don't scan past the epilogue. */
703 break;
704 }
705 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
706 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
707 (insn & 0xff) << 2);
708 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
709 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
710 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
711 bits (insn, 6, 8));
712 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
713 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
714 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
715 bits (insn, 0, 7));
716 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
717 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
718 && pv_is_constant (regs[bits (insn, 3, 5)]))
719 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
720 regs[bits (insn, 6, 8)]);
721 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
722 && pv_is_constant (regs[bits (insn, 3, 6)]))
723 {
724 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
725 int rm = bits (insn, 3, 6);
726 regs[rd] = pv_add (regs[rd], regs[rm]);
727 }
728 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
729 {
730 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
731 int src_reg = (insn & 0x78) >> 3;
732 regs[dst_reg] = regs[src_reg];
733 }
734 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
735 {
736 /* Handle stores to the stack. Normally pushes are used,
737 but with GCC -mtpcs-frame, there may be other stores
738 in the prologue to create the frame. */
739 int regno = (insn >> 8) & 0x7;
740 pv_t addr;
741
742 offset = (insn & 0xff) << 2;
743 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
744
745 if (stack.store_would_trash (addr))
746 break;
747
748 stack.store (addr, 4, regs[regno]);
749 }
750 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
751 {
752 int rd = bits (insn, 0, 2);
753 int rn = bits (insn, 3, 5);
754 pv_t addr;
755
756 offset = bits (insn, 6, 10) << 2;
757 addr = pv_add_constant (regs[rn], offset);
758
759 if (stack.store_would_trash (addr))
760 break;
761
762 stack.store (addr, 4, regs[rd]);
763 }
764 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
765 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
766 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
767 /* Ignore stores of argument registers to the stack. */
768 ;
769 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
770 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
771 /* Ignore block loads from the stack, potentially copying
772 parameters from memory. */
773 ;
774 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
775 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
776 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
777 /* Similarly ignore single loads from the stack. */
778 ;
779 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
780 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
781 /* Skip register copies, i.e. saves to another register
782 instead of the stack. */
783 ;
784 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
785 /* Recognize constant loads; even with small stacks these are necessary
786 on Thumb. */
787 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
788 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
789 {
790 /* Constant pool loads, for the same reason. */
791 unsigned int constant;
792 CORE_ADDR loc;
793
794 loc = start + 4 + bits (insn, 0, 7) * 4;
795 constant = read_memory_unsigned_integer (loc, 4, byte_order);
796 regs[bits (insn, 8, 10)] = pv_constant (constant);
797 }
798 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
799 {
800 unsigned short inst2;
801
802 inst2 = read_code_unsigned_integer (start + 2, 2,
803 byte_order_for_code);
804
805 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
806 {
807 /* BL, BLX. Allow some special function calls when
808 skipping the prologue; GCC generates these before
809 storing arguments to the stack. */
810 CORE_ADDR nextpc;
811 int j1, j2, imm1, imm2;
812
813 imm1 = sbits (insn, 0, 10);
814 imm2 = bits (inst2, 0, 10);
815 j1 = bit (inst2, 13);
816 j2 = bit (inst2, 11);
817
818 offset = ((imm1 << 12) + (imm2 << 1));
819 offset ^= ((!j2) << 22) | ((!j1) << 23);
820
821 nextpc = start + 4 + offset;
822 /* For BLX make sure to clear the low bits. */
823 if (bit (inst2, 12) == 0)
824 nextpc = nextpc & 0xfffffffc;
825
826 if (!skip_prologue_function (gdbarch, nextpc,
827 bit (inst2, 12) != 0))
828 break;
829 }
830
831 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
832 { registers } */
833 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
834 {
835 pv_t addr = regs[bits (insn, 0, 3)];
836 int regno;
837
838 if (stack.store_would_trash (addr))
839 break;
840
841 /* Calculate offsets of saved registers. */
842 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
843 if (inst2 & (1 << regno))
844 {
845 addr = pv_add_constant (addr, -4);
846 stack.store (addr, 4, regs[regno]);
847 }
848
849 if (insn & 0x0020)
850 regs[bits (insn, 0, 3)] = addr;
851 }
852
853 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
854 [Rn, #+/-imm]{!} */
855 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
856 {
857 int regno1 = bits (inst2, 12, 15);
858 int regno2 = bits (inst2, 8, 11);
859 pv_t addr = regs[bits (insn, 0, 3)];
860
861 offset = inst2 & 0xff;
862 if (insn & 0x0080)
863 addr = pv_add_constant (addr, offset);
864 else
865 addr = pv_add_constant (addr, -offset);
866
867 if (stack.store_would_trash (addr))
868 break;
869
870 stack.store (addr, 4, regs[regno1]);
871 stack.store (pv_add_constant (addr, 4),
872 4, regs[regno2]);
873
874 if (insn & 0x0020)
875 regs[bits (insn, 0, 3)] = addr;
876 }
877
878 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
879 && (inst2 & 0x0c00) == 0x0c00
880 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
881 {
882 int regno = bits (inst2, 12, 15);
883 pv_t addr = regs[bits (insn, 0, 3)];
884
885 offset = inst2 & 0xff;
886 if (inst2 & 0x0200)
887 addr = pv_add_constant (addr, offset);
888 else
889 addr = pv_add_constant (addr, -offset);
890
891 if (stack.store_would_trash (addr))
892 break;
893
894 stack.store (addr, 4, regs[regno]);
895
896 if (inst2 & 0x0100)
897 regs[bits (insn, 0, 3)] = addr;
898 }
899
900 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
901 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
902 {
903 int regno = bits (inst2, 12, 15);
904 pv_t addr;
905
906 offset = inst2 & 0xfff;
907 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
908
909 if (stack.store_would_trash (addr))
910 break;
911
912 stack.store (addr, 4, regs[regno]);
913 }
914
915 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
916 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
917 /* Ignore stores of argument registers to the stack. */
918 ;
919
920 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
921 && (inst2 & 0x0d00) == 0x0c00
922 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
923 /* Ignore stores of argument registers to the stack. */
924 ;
925
926 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
927 { registers } */
928 && (inst2 & 0x8000) == 0x0000
929 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
930 /* Ignore block loads from the stack, potentially copying
931 parameters from memory. */
932 ;
933
934 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
935 [Rn, #+/-imm] */
936 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
937 /* Similarly ignore dual loads from the stack. */
938 ;
939
940 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
941 && (inst2 & 0x0d00) == 0x0c00
942 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
943 /* Similarly ignore single loads from the stack. */
944 ;
945
946 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
947 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
948 /* Similarly ignore single loads from the stack. */
949 ;
950
951 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
952 && (inst2 & 0x8000) == 0x0000)
953 {
954 unsigned int imm = ((bits (insn, 10, 10) << 11)
955 | (bits (inst2, 12, 14) << 8)
956 | bits (inst2, 0, 7));
957
958 regs[bits (inst2, 8, 11)]
959 = pv_add_constant (regs[bits (insn, 0, 3)],
960 thumb_expand_immediate (imm));
961 }
962
963 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
964 && (inst2 & 0x8000) == 0x0000)
965 {
966 unsigned int imm = ((bits (insn, 10, 10) << 11)
967 | (bits (inst2, 12, 14) << 8)
968 | bits (inst2, 0, 7));
969
970 regs[bits (inst2, 8, 11)]
971 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
972 }
973
974 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
975 && (inst2 & 0x8000) == 0x0000)
976 {
977 unsigned int imm = ((bits (insn, 10, 10) << 11)
978 | (bits (inst2, 12, 14) << 8)
979 | bits (inst2, 0, 7));
980
981 regs[bits (inst2, 8, 11)]
982 = pv_add_constant (regs[bits (insn, 0, 3)],
983 - (CORE_ADDR) thumb_expand_immediate (imm));
984 }
985
986 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
987 && (inst2 & 0x8000) == 0x0000)
988 {
989 unsigned int imm = ((bits (insn, 10, 10) << 11)
990 | (bits (inst2, 12, 14) << 8)
991 | bits (inst2, 0, 7));
992
993 regs[bits (inst2, 8, 11)]
994 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
995 }
996
997 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
998 {
999 unsigned int imm = ((bits (insn, 10, 10) << 11)
1000 | (bits (inst2, 12, 14) << 8)
1001 | bits (inst2, 0, 7));
1002
1003 regs[bits (inst2, 8, 11)]
1004 = pv_constant (thumb_expand_immediate (imm));
1005 }
1006
1007 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1008 {
1009 unsigned int imm
1010 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1011
1012 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1013 }
1014
1015 else if (insn == 0xea5f /* mov.w Rd,Rm */
1016 && (inst2 & 0xf0f0) == 0)
1017 {
1018 int dst_reg = (inst2 & 0x0f00) >> 8;
1019 int src_reg = inst2 & 0xf;
1020 regs[dst_reg] = regs[src_reg];
1021 }
1022
1023 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1024 {
1025 /* Constant pool loads. */
1026 unsigned int constant;
1027 CORE_ADDR loc;
1028
1029 offset = bits (inst2, 0, 11);
1030 if (insn & 0x0080)
1031 loc = start + 4 + offset;
1032 else
1033 loc = start + 4 - offset;
1034
1035 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1036 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1037 }
1038
1039 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1040 {
1041 /* Constant pool loads. */
1042 unsigned int constant;
1043 CORE_ADDR loc;
1044
1045 offset = bits (inst2, 0, 7) << 2;
1046 if (insn & 0x0080)
1047 loc = start + 4 + offset;
1048 else
1049 loc = start + 4 - offset;
1050
1051 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1052 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1053
1054 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1055 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1056 }
1057
1058 else if (thumb2_instruction_changes_pc (insn, inst2))
1059 {
1060 /* Don't scan past anything that might change control flow. */
1061 break;
1062 }
1063 else
1064 {
1065 /* The optimizer might shove anything into the prologue,
1066 so we just skip what we don't recognize. */
1067 unrecognized_pc = start;
1068 }
1069
1070 start += 2;
1071 }
1072 else if (thumb_instruction_changes_pc (insn))
1073 {
1074 /* Don't scan past anything that might change control flow. */
1075 break;
1076 }
1077 else
1078 {
1079 /* The optimizer might shove anything into the prologue,
1080 so we just skip what we don't recognize. */
1081 unrecognized_pc = start;
1082 }
1083
1084 start += 2;
1085 }
1086
1087 if (arm_debug)
1088 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1089 paddress (gdbarch, start));
1090
1091 if (unrecognized_pc == 0)
1092 unrecognized_pc = start;
1093
1094 if (cache == NULL)
1095 return unrecognized_pc;
1096
1097 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1098 {
1099 /* Frame pointer is fp. Frame size is constant. */
1100 cache->framereg = ARM_FP_REGNUM;
1101 cache->framesize = -regs[ARM_FP_REGNUM].k;
1102 }
1103 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1104 {
1105 /* Frame pointer is r7. Frame size is constant. */
1106 cache->framereg = THUMB_FP_REGNUM;
1107 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1108 }
1109 else
1110 {
1111 /* Try the stack pointer... this is a bit desperate. */
1112 cache->framereg = ARM_SP_REGNUM;
1113 cache->framesize = -regs[ARM_SP_REGNUM].k;
1114 }
1115
1116 for (i = 0; i < 16; i++)
1117 if (stack.find_reg (gdbarch, i, &offset))
1118 cache->saved_regs[i].addr = offset;
1119
1120 return unrecognized_pc;
1121 }
1122
1123
1124 /* Try to analyze the instructions starting from PC, which load symbol
1125 __stack_chk_guard. Return the address of instruction after loading this
1126 symbol, set the dest register number to *BASEREG, and set the size of
1127 instructions for loading symbol in OFFSET. Return 0 if instructions are
1128 not recognized. */
1129
static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  /* ADDRESS stays 0 (the "not recognized" result) unless one of the
     known load sequences below matches.  */
  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_code_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
	{
	  /* PC-relative literal load, encoding T1: Rd is bits 8..10,
	     the literal sits at Align(PC,4) + 4 + imm8*4.  Dereference
	     the literal pool slot to obtain the symbol's address.  */
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
	{
	  /* Expect a movw/movt pair (4 halfwords total) building the
	     32-bit address from two 16-bit immediates.  */
	  unsigned short insn2
	    = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  /* Read the following 32-bit instruction, which should be the
	     matching movt.  */
	  insn1
	    = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_code_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
	{
	  /* ARM PC-relative load; the pipeline PC is the instruction
	     address plus 8.  Dereference the literal pool slot.  */
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
	{
	  /* movw/movt pair, ARM encoding (two 32-bit instructions).  */
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}
1207
1208 /* Try to skip a sequence of instructions used for stack protector. If PC
1209 points to the first instruction of this sequence, return the address of
1210 first instruction after this sequence, otherwise, return original PC.
1211
1212 On arm, this sequence of instructions is composed of mainly three steps,
1213 Step 1: load symbol __stack_chk_guard,
1214 Step 2: load from address of __stack_chk_guard,
1215 Step 3: store it to somewhere else.
1216
1217 Usually, instructions on step 2 and step 3 are the same on various ARM
1218 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1219 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1220 instructions in step 1 vary from different ARM architectures. On ARMv7,
1221 they are,
1222
1223 movw Rn, #:lower16:__stack_chk_guard
1224 movt Rn, #:upper16:__stack_chk_guard
1225
1226 On ARMv5t, it is,
1227
1228 ldr Rn, .Label
1229 ....
 1230    .Label:
1231 .word __stack_chk_guard
1232
1233 Since ldr/str is a very popular instruction, we can't use them as
1234 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1235 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
 1236    stripped, as the 'fingerprint' of a stack protector code sequence.  */
1237
static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  On success ADDR is the
     loaded address, BASEREG the register it was loaded into, and
     OFFSET the byte length of the load sequence.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      /* The load must go through the register that received the
	 guard's address in Step 1.  */
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_code_unsigned_integer (pc + offset + 2, 2,
					 byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      /* The stored register must be the one just loaded in Step 2.  */
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      /* The load must go through the register that received the
	 guard's address in Step 1.  */
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_code_unsigned_integer (pc + offset + 4,
					 4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      /* The stored register must be the one just loaded in Step 2.  */
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
1309
1310 /* Advance the PC across any function entry prologue instructions to
1311 reach some "real" code.
1312
1313 The APCS (ARM Procedure Call Standard) defines the following
1314 prologue:
1315
1316 mov ip, sp
1317 [stmfd sp!, {a1,a2,a3,a4}]
1318 stmfd sp!, {...,fp,ip,lr,pc}
1319 [stfe f7, [sp, #-12]!]
1320 [stfe f6, [sp, #-12]!]
1321 [stfe f5, [sp, #-12]!]
1322 [stfe f4, [sp, #-12]!]
1323 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1324
1325 static CORE_ADDR
1326 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1327 {
1328 CORE_ADDR func_addr, limit_pc;
1329
1330 /* See if we can determine the end of the prologue via the symbol table.
1331 If so, then return either PC, or the PC after the prologue, whichever
1332 is greater. */
1333 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1334 {
1335 CORE_ADDR post_prologue_pc
1336 = skip_prologue_using_sal (gdbarch, func_addr);
1337 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1338
1339 if (post_prologue_pc)
1340 post_prologue_pc
1341 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1342
1343
1344 /* GCC always emits a line note before the prologue and another
1345 one after, even if the two are at the same address or on the
1346 same line. Take advantage of this so that we do not need to
1347 know every instruction that might appear in the prologue. We
1348 will have producer information for most binaries; if it is
1349 missing (e.g. for -gstabs), assuming the GNU tools. */
1350 if (post_prologue_pc
1351 && (cust == NULL
1352 || COMPUNIT_PRODUCER (cust) == NULL
1353 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1354 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1355 return post_prologue_pc;
1356
1357 if (post_prologue_pc != 0)
1358 {
1359 CORE_ADDR analyzed_limit;
1360
1361 /* For non-GCC compilers, make sure the entire line is an
1362 acceptable prologue; GDB will round this function's
1363 return value up to the end of the following line so we
1364 can not skip just part of a line (and we do not want to).
1365
1366 RealView does not treat the prologue specially, but does
1367 associate prologue code with the opening brace; so this
1368 lets us skip the first line if we think it is the opening
1369 brace. */
1370 if (arm_pc_is_thumb (gdbarch, func_addr))
1371 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1372 post_prologue_pc, NULL);
1373 else
1374 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1375 post_prologue_pc, NULL);
1376
1377 if (analyzed_limit != post_prologue_pc)
1378 return func_addr;
1379
1380 return post_prologue_pc;
1381 }
1382 }
1383
1384 /* Can't determine prologue from the symbol table, need to examine
1385 instructions. */
1386
1387 /* Find an upper limit on the function prologue using the debug
1388 information. If the debug information could not be used to provide
1389 that bound, then use an arbitrary large number as the upper bound. */
1390 /* Like arm_scan_prologue, stop no later than pc + 64. */
1391 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1392 if (limit_pc == 0)
1393 limit_pc = pc + 64; /* Magic. */
1394
1395
1396 /* Check if this is Thumb code. */
1397 if (arm_pc_is_thumb (gdbarch, pc))
1398 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1399 else
1400 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1401 }
1402
1403 /* *INDENT-OFF* */
1404 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1405 This function decodes a Thumb function prologue to determine:
1406 1) the size of the stack frame
1407 2) which registers are saved on it
1408 3) the offsets of saved regs
1409 4) the offset from the stack pointer to the frame pointer
1410
1411 A typical Thumb function prologue would create this stack frame
1412 (offsets relative to FP)
1413 old SP -> 24 stack parameters
1414 20 LR
1415 16 R7
1416 R7 -> 0 local variables (16 bytes)
1417 SP -> -12 additional stack space (12 bytes)
1418 The frame size would thus be 36 bytes, and the frame offset would be
1419 12 bytes. The frame register is R7.
1420
1421 The comments for thumb_skip_prolog() describe the algorithm we use
1422 to detect the end of the prolog. */
1423 /* *INDENT-ON* */
1424
1425 static void
1426 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1427 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1428 {
1429 CORE_ADDR prologue_start;
1430 CORE_ADDR prologue_end;
1431
1432 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1433 &prologue_end))
1434 {
1435 /* See comment in arm_scan_prologue for an explanation of
1436 this heuristics. */
1437 if (prologue_end > prologue_start + 64)
1438 {
1439 prologue_end = prologue_start + 64;
1440 }
1441 }
1442 else
1443 /* We're in the boondocks: we have no idea where the start of the
1444 function is. */
1445 return;
1446
1447 prologue_end = std::min (prologue_end, prev_pc);
1448
1449 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1450 }
1451
1452 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1453 otherwise. */
1454
1455 static int
1456 arm_instruction_restores_sp (unsigned int insn)
1457 {
1458 if (bits (insn, 28, 31) != INST_NV)
1459 {
1460 if ((insn & 0x0df0f000) == 0x0080d000
1461 /* ADD SP (register or immediate). */
1462 || (insn & 0x0df0f000) == 0x0040d000
1463 /* SUB SP (register or immediate). */
1464 || (insn & 0x0ffffff0) == 0x01a0d000
1465 /* MOV SP. */
1466 || (insn & 0x0fff0000) == 0x08bd0000
1467 /* POP (LDMIA). */
1468 || (insn & 0x0fff0000) == 0x049d0000)
1469 /* POP of a single register. */
1470 return 1;
1471 }
1472
1473 return 0;
1474 }
1475
1476 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1477 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1478 fill it in. Return the first address not recognized as a prologue
1479 instruction.
1480
1481 We recognize all the instructions typically found in ARM prologues,
1482 plus harmless instructions which can be skipped (either for analysis
1483 purposes, or a more restrictive set that can be skipped when finding
1484 the end of the prologue). */
1485
1486 static CORE_ADDR
1487 arm_analyze_prologue (struct gdbarch *gdbarch,
1488 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1489 struct arm_prologue_cache *cache)
1490 {
1491 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1492 int regno;
1493 CORE_ADDR offset, current_pc;
1494 pv_t regs[ARM_FPS_REGNUM];
1495 CORE_ADDR unrecognized_pc = 0;
1496
1497 /* Search the prologue looking for instructions that set up the
1498 frame pointer, adjust the stack pointer, and save registers.
1499
1500 Be careful, however, and if it doesn't look like a prologue,
1501 don't try to scan it. If, for instance, a frameless function
1502 begins with stmfd sp!, then we will tell ourselves there is
1503 a frame, which will confuse stack traceback, as well as "finish"
1504 and other operations that rely on a knowledge of the stack
1505 traceback. */
1506
1507 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1508 regs[regno] = pv_register (regno, 0);
1509 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1510
1511 for (current_pc = prologue_start;
1512 current_pc < prologue_end;
1513 current_pc += 4)
1514 {
1515 unsigned int insn
1516 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1517
1518 if (insn == 0xe1a0c00d) /* mov ip, sp */
1519 {
1520 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1521 continue;
1522 }
1523 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1524 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1525 {
1526 unsigned imm = insn & 0xff; /* immediate value */
1527 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1528 int rd = bits (insn, 12, 15);
1529 imm = (imm >> rot) | (imm << (32 - rot));
1530 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1531 continue;
1532 }
1533 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1534 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1535 {
1536 unsigned imm = insn & 0xff; /* immediate value */
1537 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1538 int rd = bits (insn, 12, 15);
1539 imm = (imm >> rot) | (imm << (32 - rot));
1540 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1541 continue;
1542 }
1543 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1544 [sp, #-4]! */
1545 {
1546 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1547 break;
1548 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1549 stack.store (regs[ARM_SP_REGNUM], 4,
1550 regs[bits (insn, 12, 15)]);
1551 continue;
1552 }
1553 else if ((insn & 0xffff0000) == 0xe92d0000)
1554 /* stmfd sp!, {..., fp, ip, lr, pc}
1555 or
1556 stmfd sp!, {a1, a2, a3, a4} */
1557 {
1558 int mask = insn & 0xffff;
1559
1560 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1561 break;
1562
1563 /* Calculate offsets of saved registers. */
1564 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1565 if (mask & (1 << regno))
1566 {
1567 regs[ARM_SP_REGNUM]
1568 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1569 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1570 }
1571 }
1572 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1573 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1574 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1575 {
1576 /* No need to add this to saved_regs -- it's just an arg reg. */
1577 continue;
1578 }
1579 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1580 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1581 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1582 {
1583 /* No need to add this to saved_regs -- it's just an arg reg. */
1584 continue;
1585 }
1586 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1587 { registers } */
1588 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1589 {
1590 /* No need to add this to saved_regs -- it's just arg regs. */
1591 continue;
1592 }
1593 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1594 {
1595 unsigned imm = insn & 0xff; /* immediate value */
1596 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1597 imm = (imm >> rot) | (imm << (32 - rot));
1598 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1599 }
1600 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1601 {
1602 unsigned imm = insn & 0xff; /* immediate value */
1603 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1604 imm = (imm >> rot) | (imm << (32 - rot));
1605 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1606 }
1607 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1608 [sp, -#c]! */
1609 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1610 {
1611 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1612 break;
1613
1614 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1615 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1616 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1617 }
1618 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1619 [sp!] */
1620 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1621 {
1622 int n_saved_fp_regs;
1623 unsigned int fp_start_reg, fp_bound_reg;
1624
1625 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1626 break;
1627
1628 if ((insn & 0x800) == 0x800) /* N0 is set */
1629 {
1630 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1631 n_saved_fp_regs = 3;
1632 else
1633 n_saved_fp_regs = 1;
1634 }
1635 else
1636 {
1637 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1638 n_saved_fp_regs = 2;
1639 else
1640 n_saved_fp_regs = 4;
1641 }
1642
1643 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1644 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1645 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1646 {
1647 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1648 stack.store (regs[ARM_SP_REGNUM], 12,
1649 regs[fp_start_reg++]);
1650 }
1651 }
1652 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1653 {
1654 /* Allow some special function calls when skipping the
1655 prologue; GCC generates these before storing arguments to
1656 the stack. */
1657 CORE_ADDR dest = BranchDest (current_pc, insn);
1658
1659 if (skip_prologue_function (gdbarch, dest, 0))
1660 continue;
1661 else
1662 break;
1663 }
1664 else if ((insn & 0xf0000000) != 0xe0000000)
1665 break; /* Condition not true, exit early. */
1666 else if (arm_instruction_changes_pc (insn))
1667 /* Don't scan past anything that might change control flow. */
1668 break;
1669 else if (arm_instruction_restores_sp (insn))
1670 {
1671 /* Don't scan past the epilogue. */
1672 break;
1673 }
1674 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1675 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1676 /* Ignore block loads from the stack, potentially copying
1677 parameters from memory. */
1678 continue;
1679 else if ((insn & 0xfc500000) == 0xe4100000
1680 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1681 /* Similarly ignore single loads from the stack. */
1682 continue;
1683 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1684 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1685 register instead of the stack. */
1686 continue;
1687 else
1688 {
1689 /* The optimizer might shove anything into the prologue, if
1690 we build up cache (cache != NULL) from scanning prologue,
1691 we just skip what we don't recognize and scan further to
1692 make cache as complete as possible. However, if we skip
1693 prologue, we'll stop immediately on unrecognized
1694 instruction. */
1695 unrecognized_pc = current_pc;
1696 if (cache != NULL)
1697 continue;
1698 else
1699 break;
1700 }
1701 }
1702
1703 if (unrecognized_pc == 0)
1704 unrecognized_pc = current_pc;
1705
1706 if (cache)
1707 {
1708 int framereg, framesize;
1709
1710 /* The frame size is just the distance from the frame register
1711 to the original stack pointer. */
1712 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1713 {
1714 /* Frame pointer is fp. */
1715 framereg = ARM_FP_REGNUM;
1716 framesize = -regs[ARM_FP_REGNUM].k;
1717 }
1718 else
1719 {
1720 /* Try the stack pointer... this is a bit desperate. */
1721 framereg = ARM_SP_REGNUM;
1722 framesize = -regs[ARM_SP_REGNUM].k;
1723 }
1724
1725 cache->framereg = framereg;
1726 cache->framesize = framesize;
1727
1728 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1729 if (stack.find_reg (gdbarch, regno, &offset))
1730 cache->saved_regs[regno].addr = offset;
1731 }
1732
1733 if (arm_debug)
1734 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1735 paddress (gdbarch, unrecognized_pc));
1736
1737 return unrecognized_pc;
1738 }
1739
/* Scan the prologue of the function containing THIS_FRAME's PC and
   fill CACHE with the frame register, frame size, and saved-register
   offsets.  Dispatches to thumb_scan_prologue for Thumb frames.  For
   ARM frames, the prologue bounds come from the symbol table when
   available; otherwise they are recovered by peeking at the saved
   return address through the frame pointer.  */

static void
arm_scan_prologue (struct frame_info *this_frame,
		   struct arm_prologue_cache *cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  CORE_ADDR prologue_start, prologue_end;
  CORE_ADDR prev_pc = get_frame_pc (this_frame);
  CORE_ADDR block_addr = get_frame_address_in_block (this_frame);

  /* Assume there is no frame until proven otherwise.  */
  cache->framereg = ARM_SP_REGNUM;
  cache->framesize = 0;

  /* Check for Thumb prologue.  */
  if (arm_frame_is_thumb (this_frame))
    {
      thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
      return;
    }

  /* Find the function prologue.  If we can't find the function in
     the symbol table, peek in the stack frame to find the PC.  */
  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* One way to find the end of the prologue (which works well
	 for unoptimized code) is to do the following:

	    struct symtab_and_line sal = find_pc_line (prologue_start, 0);

	    if (sal.line == 0)
	      prologue_end = prev_pc;
	    else if (sal.end < prologue_end)
	      prologue_end = sal.end;

	 This mechanism is very accurate so long as the optimizer
	 doesn't move any instructions from the function body into the
	 prologue.  If this happens, sal.end will be the last
	 instruction in the first hunk of prologue code just before
	 the first instruction that the scheduler has moved from
	 the body to the prologue.

	 In order to make sure that we scan all of the prologue
	 instructions, we use a slightly less accurate mechanism which
	 may scan more than necessary.  To help compensate for this
	 lack of accuracy, the prologue scanning loop below contains
	 several clauses which'll cause the loop to terminate early if
	 an implausible prologue instruction is encountered.

	 The expression

	      prologue_start + 64

	 is a suitable endpoint since it accounts for the largest
	 possible prologue plus up to five instructions inserted by
	 the scheduler.  */

      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }
  else
    {
      /* We have no symbol information.  Our only option is to assume this
	 function has a standard stack frame and the normal frame register.
	 Then, we can find the value of our frame pointer on entrance to
	 the callee (or at the present moment if this is the innermost frame).
	 The value stored there should be the address of the stmfd + 8.  */
      CORE_ADDR frame_loc;
      ULONGEST return_value;

      /* AAPCS does not use a frame register, so we can abort here.  */
      if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
	return;

      frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
      if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
					      &return_value))
	return;
      else
	{
	  prologue_start = gdbarch_addr_bits_remove
			     (gdbarch, return_value) - 8;
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }

  /* Never scan past the PC we are stopped at.  */
  if (prev_pc < prologue_end)
    prologue_end = prev_pc;

  arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}
1834
1835 static struct arm_prologue_cache *
1836 arm_make_prologue_cache (struct frame_info *this_frame)
1837 {
1838 int reg;
1839 struct arm_prologue_cache *cache;
1840 CORE_ADDR unwound_fp;
1841
1842 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1843 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1844
1845 arm_scan_prologue (this_frame, cache);
1846
1847 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1848 if (unwound_fp == 0)
1849 return cache;
1850
1851 cache->prev_sp = unwound_fp + cache->framesize;
1852
1853 /* Calculate actual addresses of saved registers using offsets
1854 determined by arm_scan_prologue. */
1855 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1856 if (trad_frame_addr_p (cache->saved_regs, reg))
1857 cache->saved_regs[reg].addr += cache->prev_sp;
1858
1859 return cache;
1860 }
1861
1862 /* Implementation of the stop_reason hook for arm_prologue frames. */
1863
1864 static enum unwind_stop_reason
1865 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1866 void **this_cache)
1867 {
1868 struct arm_prologue_cache *cache;
1869 CORE_ADDR pc;
1870
1871 if (*this_cache == NULL)
1872 *this_cache = arm_make_prologue_cache (this_frame);
1873 cache = (struct arm_prologue_cache *) *this_cache;
1874
1875 /* This is meant to halt the backtrace at "_start". */
1876 pc = get_frame_pc (this_frame);
1877 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1878 return UNWIND_OUTERMOST;
1879
1880 /* If we've hit a wall, stop. */
1881 if (cache->prev_sp == 0)
1882 return UNWIND_OUTERMOST;
1883
1884 return UNWIND_NO_REASON;
1885 }
1886
1887 /* Our frame ID for a normal frame is the current function's starting PC
1888 and the caller's SP when we were called. */
1889
1890 static void
1891 arm_prologue_this_id (struct frame_info *this_frame,
1892 void **this_cache,
1893 struct frame_id *this_id)
1894 {
1895 struct arm_prologue_cache *cache;
1896 struct frame_id id;
1897 CORE_ADDR pc, func;
1898
1899 if (*this_cache == NULL)
1900 *this_cache = arm_make_prologue_cache (this_frame);
1901 cache = (struct arm_prologue_cache *) *this_cache;
1902
1903 /* Use function start address as part of the frame ID. If we cannot
1904 identify the start address (due to missing symbol information),
1905 fall back to just using the current PC. */
1906 pc = get_frame_pc (this_frame);
1907 func = get_frame_func (this_frame);
1908 if (!func)
1909 func = pc;
1910
1911 id = frame_id_build (cache->prev_sp, func);
1912 *this_id = id;
1913 }
1914
1915 static struct value *
1916 arm_prologue_prev_register (struct frame_info *this_frame,
1917 void **this_cache,
1918 int prev_regnum)
1919 {
1920 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1921 struct arm_prologue_cache *cache;
1922
1923 if (*this_cache == NULL)
1924 *this_cache = arm_make_prologue_cache (this_frame);
1925 cache = (struct arm_prologue_cache *) *this_cache;
1926
1927 /* If we are asked to unwind the PC, then we need to return the LR
1928 instead. The prologue may save PC, but it will point into this
1929 frame's prologue, not the next frame's resume location. Also
1930 strip the saved T bit. A valid LR may have the low bit set, but
1931 a valid PC never does. */
1932 if (prev_regnum == ARM_PC_REGNUM)
1933 {
1934 CORE_ADDR lr;
1935
1936 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1937 return frame_unwind_got_constant (this_frame, prev_regnum,
1938 arm_addr_bits_remove (gdbarch, lr));
1939 }
1940
1941 /* SP is generally not saved to the stack, but this frame is
1942 identified by the next frame's stack pointer at the time of the call.
1943 The value was already reconstructed into PREV_SP. */
1944 if (prev_regnum == ARM_SP_REGNUM)
1945 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1946
1947 /* The CPSR may have been changed by the call instruction and by the
1948 called function. The only bit we can reconstruct is the T bit,
1949 by checking the low bit of LR as of the call. This is a reliable
1950 indicator of Thumb-ness except for some ARM v4T pre-interworking
1951 Thumb code, which could get away with a clear low bit as long as
1952 the called function did not use bx. Guess that all other
1953 bits are unchanged; the condition flags are presumably lost,
1954 but the processor status is likely valid. */
1955 if (prev_regnum == ARM_PS_REGNUM)
1956 {
1957 CORE_ADDR lr, cpsr;
1958 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1959
1960 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1961 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1962 if (IS_THUMB_ADDR (lr))
1963 cpsr |= t_bit;
1964 else
1965 cpsr &= ~t_bit;
1966 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1967 }
1968
1969 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1970 prev_regnum);
1971 }
1972
/* Unwinder for normal frames, driven by the prologue scanners above.  */
struct frame_unwind arm_prologue_unwind = {
  NORMAL_FRAME,
  arm_prologue_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,				/* unwind data */
  default_frame_sniffer
};
1981
/* Maintain a list of ARM exception table entries per objfile, similar to the
   list of mapping symbols.  We only cache entries for standard ARM-defined
   personality routines; the cache will contain only the frame unwinding
   instructions associated with the entry (not the descriptors).  */

struct arm_exidx_entry
{
  /* Section-relative start address of the region covered by this entry.  */
  CORE_ADDR addr;

  /* Normalized unwind instructions for the region (converted to host
     order and terminated by an implicit 0xb0 "Finish" code by
     arm_exidx_new_objfile), or NULL when no instructions were cached
     (e.g. an EXIDX_CANTUNWIND entry).  */
  gdb_byte *entry;

  /* Order entries by start address so std::lower_bound can be used for
     lookup in arm_find_exidx_entry.  */
  bool operator< (const arm_exidx_entry &other) const
  {
    return addr < other.addr;
  }
};
1997
/* Exception-table cache attached to a BFD: one sorted vector of
   arm_exidx_entry per section, indexed by the section's index.  */

struct arm_exidx_data
{
  /* Filled in by arm_exidx_new_objfile; queried via
     arm_find_exidx_entry.  */
  std::vector<std::vector<arm_exidx_entry>> section_maps;
};
2002
/* Per-BFD key to store exception handling information.  Lazily
   populated by arm_exidx_new_objfile; also used as the "have we seen
   this BFD already" marker.  */
static const struct bfd_key<arm_exidx_data> arm_exidx_data_key;
2005
2006 static struct obj_section *
2007 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2008 {
2009 struct obj_section *osect;
2010
2011 ALL_OBJFILE_OSECTIONS (objfile, osect)
2012 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2013 {
2014 bfd_vma start, size;
2015 start = bfd_section_vma (osect->the_bfd_section);
2016 size = bfd_section_size (osect->the_bfd_section);
2017
2018 if (start <= vma && vma < start + size)
2019 return osect;
2020 }
2021
2022 return NULL;
2023 }
2024
/* Parse contents of exception table and exception index sections
   of OBJFILE, and fill in the exception table entry cache.

   For each entry that refers to a standard ARM-defined personality
   routine, extract the frame unwinding instructions (from either
   the index or the table section).  The unwinding instructions
   are normalized by:
   - extracting them from the rest of the table data
   - converting to host endianness
   - appending the implicit 0xb0 ("Finish") code

   The extracted and normalized instructions are stored for later
   retrieval by the arm_find_exidx_entry routine.  */

static void
arm_exidx_new_objfile (struct objfile *objfile)
{
  struct arm_exidx_data *data;
  asection *exidx, *extab;
  bfd_vma exidx_vma = 0, extab_vma = 0;
  LONGEST i;

  /* If we've already touched this file, do nothing.  */
  if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL)
    return;

  /* Read contents of exception table and index.  */
  exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
  gdb::byte_vector exidx_data;
  if (exidx)
    {
      exidx_vma = bfd_section_vma (exidx);
      exidx_data.resize (bfd_section_size (exidx));

      if (!bfd_get_section_contents (objfile->obfd, exidx,
				     exidx_data.data (), 0,
				     exidx_data.size ()))
	return;
    }

  extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
  gdb::byte_vector extab_data;
  if (extab)
    {
      extab_vma = bfd_section_vma (extab);
      extab_data.resize (bfd_section_size (extab));

      if (!bfd_get_section_contents (objfile->obfd, extab,
				     extab_data.data (), 0,
				     extab_data.size ()))
	return;
    }

  /* Allocate exception table data structure.  */
  data = arm_exidx_data_key.emplace (objfile->obfd);
  data->section_maps.resize (objfile->obfd->section_count);

  /* Fill in exception table.  Each index entry is a pair of 32-bit
     words: a self-relative offset to the function start, and either an
     inline unwind description or an offset into .ARM.extab.  */
  for (i = 0; i < exidx_data.size () / 8; i++)
    {
      struct arm_exidx_entry new_exidx_entry;
      bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
      bfd_vma val = bfd_h_get_32 (objfile->obfd,
				  exidx_data.data () + i * 8 + 4);
      bfd_vma addr = 0, word = 0;
      int n_bytes = 0, n_words = 0;
      struct obj_section *sec;
      gdb_byte *entry = NULL;

      /* Extract address of start of function.  The first word is a
	 31-bit offset; sign-extend it and relocate it relative to the
	 address of this index entry.  */
      idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
      idx += exidx_vma + i * 8;

      /* Find section containing function and compute section offset.  */
      sec = arm_obj_section_from_vma (objfile, idx);
      if (sec == NULL)
	continue;
      idx -= bfd_section_vma (sec->the_bfd_section);

      /* Determine address of exception table entry.  */
      if (val == 1)
	{
	  /* EXIDX_CANTUNWIND -- no exception table entry present.  */
	}
      else if ((val & 0xff000000) == 0x80000000)
	{
	  /* Exception table entry embedded in .ARM.exidx
	     -- must be short form.  */
	  word = val;
	  n_bytes = 3;
	}
      else if (!(val & 0x80000000))
	{
	  /* Exception table entry in .ARM.extab.  Again a 31-bit
	     sign-extended self-relative offset.  */
	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
	  addr += exidx_vma + i * 8 + 4;

	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data.data () + addr - extab_vma);
	      addr += 4;

	      if ((word & 0xff000000) == 0x80000000)
		{
		  /* Short form.  */
		  n_bytes = 3;
		}
	      else if ((word & 0xff000000) == 0x81000000
		       || (word & 0xff000000) == 0x82000000)
		{
		  /* Long form.  */
		  n_bytes = 2;
		  n_words = ((word >> 16) & 0xff);
		}
	      else if (!(word & 0x80000000))
		{
		  bfd_vma pers;
		  struct obj_section *pers_sec;
		  int gnu_personality = 0;

		  /* Custom personality routine.  */
		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);

		  /* Check whether we've got one of the variants of the
		     GNU personality routines.  */
		  pers_sec = arm_obj_section_from_vma (objfile, pers);
		  if (pers_sec)
		    {
		      static const char *personality[] =
			{
			  "__gcc_personality_v0",
			  "__gxx_personality_v0",
			  "__gcj_personality_v0",
			  "__gnu_objc_personality_v0",
			  NULL
			};

		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
		      int k;

		      for (k = 0; personality[k]; k++)
			if (lookup_minimal_symbol_by_pc_name
			      (pc, personality[k], objfile))
			  {
			    gnu_personality = 1;
			    break;
			  }
		    }

		  /* If so, the next word contains a word count in the high
		     byte, followed by the same unwind instructions as the
		     pre-defined forms.  */
		  if (gnu_personality
		      && addr + 4 <= extab_vma + extab_data.size ())
		    {
		      word = bfd_h_get_32 (objfile->obfd,
					   (extab_data.data ()
					    + addr - extab_vma));
		      addr += 4;
		      n_bytes = 3;
		      n_words = ((word >> 24) & 0xff);
		    }
		}
	    }
	}

      /* Sanity check address.  */
      if (n_words)
	if (addr < extab_vma
	    || addr + 4 * n_words > extab_vma + extab_data.size ())
	  n_words = n_bytes = 0;

      /* The unwind instructions reside in WORD (only the N_BYTES least
	 significant bytes are valid), followed by N_WORDS words in the
	 extab section starting at ADDR.  */
      if (n_bytes || n_words)
	{
	  gdb_byte *p = entry
	    = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
					  n_bytes + n_words * 4 + 1);

	  /* Copy the valid bytes of WORD, most significant first.  */
	  while (n_bytes--)
	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);

	  /* Then copy the extab words, byte-swapping to host order.  */
	  while (n_words--)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data.data () + addr - extab_vma);
	      addr += 4;

	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
	      *p++ = (gdb_byte) (word & 0xff);
	    }

	  /* Implied "Finish" to terminate the list.  */
	  *p++ = 0xb0;
	}

      /* Push entry onto vector.  They are guaranteed to always
	 appear in order of increasing addresses.  */
      new_exidx_entry.addr = idx;
      new_exidx_entry.entry = entry;
      data->section_maps[sec->the_bfd_section->index].push_back
	(new_exidx_entry);
    }
}
2235
2236 /* Search for the exception table entry covering MEMADDR. If one is found,
2237 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2238 set *START to the start of the region covered by this entry. */
2239
2240 static gdb_byte *
2241 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2242 {
2243 struct obj_section *sec;
2244
2245 sec = find_pc_section (memaddr);
2246 if (sec != NULL)
2247 {
2248 struct arm_exidx_data *data;
2249 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2250
2251 data = arm_exidx_data_key.get (sec->objfile->obfd);
2252 if (data != NULL)
2253 {
2254 std::vector<arm_exidx_entry> &map
2255 = data->section_maps[sec->the_bfd_section->index];
2256 if (!map.empty ())
2257 {
2258 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2259
2260 /* std::lower_bound finds the earliest ordered insertion
2261 point. If the following symbol starts at this exact
2262 address, we use that; otherwise, the preceding
2263 exception table entry covers this address. */
2264 if (idx < map.end ())
2265 {
2266 if (idx->addr == map_key.addr)
2267 {
2268 if (start)
2269 *start = idx->addr + obj_section_addr (sec);
2270 return idx->entry;
2271 }
2272 }
2273
2274 if (idx > map.begin ())
2275 {
2276 idx = idx - 1;
2277 if (start)
2278 *start = idx->addr + obj_section_addr (sec);
2279 return idx->entry;
2280 }
2281 }
2282 }
2283 }
2284
2285 return NULL;
2286 }
2287
/* Given the current frame THIS_FRAME, and its associated frame unwinding
   instruction list from the ARM exception table entry ENTRY, allocate and
   return a prologue cache structure describing how to unwind this frame.

   Return NULL if the unwinding instruction list contains a "spare",
   "reserved" or "refuse to unwind" instruction as defined in section
   "9.3 Frame unwinding instructions" of the "Exception Handling ABI
   for the ARM Architecture" document.  */

static struct arm_prologue_cache *
arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
{
  /* VSP is the "virtual stack pointer" tracked while interpreting the
     unwind instructions; it is initialized lazily from the frame's SP
     (and re-initialized whenever SP itself is restored).  */
  CORE_ADDR vsp = 0;
  int vsp_valid = 0;

  struct arm_prologue_cache *cache;
  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  for (;;)
    {
      gdb_byte insn;

      /* Whenever we reload SP, we actually have to retrieve its
	 actual value in the current frame.  */
      if (!vsp_valid)
	{
	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
	    {
	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
	      vsp = get_frame_register_unsigned (this_frame, reg);
	    }
	  else
	    {
	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
	    }

	  vsp_valid = 1;
	}

      /* Decode next unwind instruction.  */
      insn = *entry++;

      if ((insn & 0xc0) == 0)
	{
	  /* 00xxxxxx: vsp = vsp + (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp += (offset << 2) + 4;
	}
      else if ((insn & 0xc0) == 0x40)
	{
	  /* 01xxxxxx: vsp = vsp - (xxxxxx << 2) - 4.  */
	  int offset = insn & 0x3f;
	  vsp -= (offset << 2) + 4;
	}
      else if ((insn & 0xf0) == 0x80)
	{
	  /* 1000iiii iiiiiiii: pop r4..r15 under a 12-bit mask.  */
	  int mask = ((insn & 0xf) << 8) | *entry++;
	  int i;

	  /* The special case of an all-zero mask identifies
	     "Refuse to unwind".  We return NULL to fall back
	     to the prologue analyzer.  */
	  if (mask == 0)
	    return NULL;

	  /* Pop registers r4..r15 under mask.  */
	  for (i = 0; i < 12; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[4 + i].addr = vsp;
		vsp += 4;
	      }

	  /* Special-case popping SP -- we need to reload vsp.  */
	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
	    vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0x90)
	{
	  /* 1001nnnn: vsp = r[nnnn].  */
	  int reg = insn & 0xf;

	  /* Reserved cases.  */
	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
	    return NULL;

	  /* Set SP from another register and mark VSP for reload.  */
	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
	  vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0xa0)
	{
	  /* 1010Wnnn: pop r4..r[4+nnn], plus r14 if W is set.  */
	  int count = insn & 0x7;
	  int pop_lr = (insn & 0x8) != 0;
	  int i;

	  /* Pop r4..r[4+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[4 + i].addr = vsp;
	      vsp += 4;
	    }

	  /* If indicated by flag, pop LR as well.  */
	  if (pop_lr)
	    {
	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
	      vsp += 4;
	    }
	}
      else if (insn == 0xb0)
	{
	  /* 10110000: "Finish" -- end of the instruction list.

	     We could only have updated PC by popping into it; if so, it
	     will show up as address.  Otherwise, copy LR into PC.  */
	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
	    cache->saved_regs[ARM_PC_REGNUM]
	      = cache->saved_regs[ARM_LR_REGNUM];

	  /* We're done.  */
	  break;
	}
      else if (insn == 0xb1)
	{
	  /* 10110001: pop r0..r3 under a 4-bit mask in the next byte.  */
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop r0..r3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[i].addr = vsp;
		vsp += 4;
	      }
	}
      else if (insn == 0xb2)
	{
	  /* 10110010: vsp = vsp + 0x204 + (operand << 2), where the
	     operand is a variable-length quantity encoded 7 bits per
	     byte, least significant group first; bit 7 set means
	     another byte follows.  */
	  ULONGEST offset = 0;
	  unsigned shift = 0;

	  do
	    {
	      offset |= (*entry & 0x7f) << shift;
	      shift += 7;
	    }
	  while (*entry++ & 0x80);

	  vsp += 0x204 + (offset << 2);
	}
      else if (insn == 0xb3)
	{
	  /* 10110011 sssscccc: pop VFP registers (FSTMFDX form).  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D15 are valid here.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if ((insn & 0xf8) == 0xb8)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if (insn == 0xc6)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers WR0..WR15 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc7)
	{
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
		vsp += 4;
	      }
	}
      else if ((insn & 0xf8) == 0xc0)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop iwmmx registers WR[10]..WR[10+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc8)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D31 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers
	     D[16+start]..D[16+start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc9)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if ((insn & 0xf8) == 0xd0)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else
	{
	  /* Everything else is "spare".  */
	  return NULL;
	}
    }

  /* If we restore SP from a register, assume this was the frame register.
     Otherwise just fall back to SP as frame register.  */
  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
    cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
  else
    cache->framereg = ARM_SP_REGNUM;

  /* Determine offset to previous frame.  */
  cache->framesize
    = vsp - get_frame_register_unsigned (this_frame, cache->framereg);

  /* We already got the previous SP.  */
  cache->prev_sp = vsp;

  return cache;
}
2586
/* Unwinding via ARM exception table entries.  Note that the sniffer
   already computes a filled-in prologue cache, which is then used
   with the same arm_prologue_this_id and arm_prologue_prev_register
   routines also used for prologue-parsing based unwinding.  */

static int
arm_exidx_unwind_sniffer (const struct frame_unwind *self,
			  struct frame_info *this_frame,
			  void **this_prologue_cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR addr_in_block, exidx_region, func_start;
  struct arm_prologue_cache *cache;
  gdb_byte *entry;

  /* See if we have an ARM exception table entry covering this address.  */
  addr_in_block = get_frame_address_in_block (this_frame);
  entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
  if (!entry)
    return 0;

  /* The ARM exception table does not describe unwind information
     for arbitrary PC values, but is guaranteed to be correct only
     at call sites.  We have to decide here whether we want to use
     ARM exception table information for this frame, or fall back
     to using prologue parsing.  (Note that if we have DWARF CFI,
     this sniffer isn't even called -- CFI is always preferred.)

     Before we make this decision, however, we check whether we
     actually have *symbol* information for the current frame.
     If not, prologue parsing would not work anyway, so we might
     as well use the exception table and hope for the best.  */
  if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
    {
      int exc_valid = 0;

      /* If the next frame is "normal", we are at a call site in this
	 frame, so exception information is guaranteed to be valid.  */
      if (get_next_frame (this_frame)
	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
	exc_valid = 1;

      /* We also assume exception information is valid if we're currently
	 blocked in a system call.  The system library is supposed to
	 ensure this, so that e.g. pthread cancellation works.  */
      if (arm_frame_is_thumb (this_frame))
	{
	  ULONGEST insn;

	  /* The frame PC points past the 16-bit Thumb instruction;
	     check whether that instruction was an SVC.  */
	  if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
						 2, byte_order_for_code, &insn)
	      && (insn & 0xff00) == 0xdf00 /* svc */)
	    exc_valid = 1;
	}
      else
	{
	  ULONGEST insn;

	  /* Likewise for a 32-bit ARM instruction.  */
	  if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
						 4, byte_order_for_code, &insn)
	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
	    exc_valid = 1;
	}

      /* Bail out if we don't know that exception information is valid.  */
      if (!exc_valid)
	return 0;

      /* The ARM exception index does not mark the *end* of the region
	 covered by the entry, and some functions will not have any entry.
	 To correctly recognize the end of the covered region, the linker
	 should have inserted dummy records with a CANTUNWIND marker.

	 Unfortunately, current versions of GNU ld do not reliably do
	 this, and thus we may have found an incorrect entry above.
	 As a (temporary) sanity check, we only use the entry if it
	 lies *within* the bounds of the function.  Note that this check
	 might reject perfectly valid entries that just happen to cover
	 multiple functions; therefore this check ought to be removed
	 once the linker is fixed.  */
      if (func_start > exidx_region)
	return 0;
    }

  /* Decode the list of unwinding instructions into a prologue cache.
     Note that this may fail due to e.g. a "refuse to unwind" code.  */
  cache = arm_exidx_fill_cache (this_frame, entry);
  if (!cache)
    return 0;

  *this_prologue_cache = cache;
  return 1;
}
2681
/* Unwinder based on ARM exception table entries; the sniffer builds the
   prologue cache, which is then consumed by the same this_id and
   prev_register hooks as the prologue-based unwinder.  */

struct frame_unwind arm_exidx_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_exidx_unwind_sniffer
};
2690
2691 static struct arm_prologue_cache *
2692 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2693 {
2694 struct arm_prologue_cache *cache;
2695 int reg;
2696
2697 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2698 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2699
2700 /* Still rely on the offset calculated from prologue. */
2701 arm_scan_prologue (this_frame, cache);
2702
2703 /* Since we are in epilogue, the SP has been restored. */
2704 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2705
2706 /* Calculate actual addresses of saved registers using offsets
2707 determined by arm_scan_prologue. */
2708 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2709 if (trad_frame_addr_p (cache->saved_regs, reg))
2710 cache->saved_regs[reg].addr += cache->prev_sp;
2711
2712 return cache;
2713 }
2714
2715 /* Implementation of function hook 'this_id' in
2716 'struct frame_uwnind' for epilogue unwinder. */
2717
2718 static void
2719 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2720 void **this_cache,
2721 struct frame_id *this_id)
2722 {
2723 struct arm_prologue_cache *cache;
2724 CORE_ADDR pc, func;
2725
2726 if (*this_cache == NULL)
2727 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2728 cache = (struct arm_prologue_cache *) *this_cache;
2729
2730 /* Use function start address as part of the frame ID. If we cannot
2731 identify the start address (due to missing symbol information),
2732 fall back to just using the current PC. */
2733 pc = get_frame_pc (this_frame);
2734 func = get_frame_func (this_frame);
2735 if (func == 0)
2736 func = pc;
2737
2738 (*this_id) = frame_id_build (cache->prev_sp, pc);
2739 }
2740
2741 /* Implementation of function hook 'prev_register' in
2742 'struct frame_uwnind' for epilogue unwinder. */
2743
2744 static struct value *
2745 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2746 void **this_cache, int regnum)
2747 {
2748 if (*this_cache == NULL)
2749 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2750
2751 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2752 }
2753
2754 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2755 CORE_ADDR pc);
2756 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2757 CORE_ADDR pc);
2758
2759 /* Implementation of function hook 'sniffer' in
2760 'struct frame_uwnind' for epilogue unwinder. */
2761
2762 static int
2763 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2764 struct frame_info *this_frame,
2765 void **this_prologue_cache)
2766 {
2767 if (frame_relative_level (this_frame) == 0)
2768 {
2769 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2770 CORE_ADDR pc = get_frame_pc (this_frame);
2771
2772 if (arm_frame_is_thumb (this_frame))
2773 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2774 else
2775 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2776 }
2777 else
2778 return 0;
2779 }
2780
/* Frame unwinder from epilogue.  Shares the prologue-cache layout with
   the other ARM unwinders; see arm_make_epilogue_frame_cache.  */

static const struct frame_unwind arm_epilogue_frame_unwind =
{
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_epilogue_frame_this_id,
  arm_epilogue_frame_prev_register,
  NULL,
  arm_epilogue_frame_sniffer,
};
2792
2793 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2794 trampoline, return the target PC. Otherwise return 0.
2795
2796 void call0a (char c, short s, int i, long l) {}
2797
2798 int main (void)
2799 {
2800 (*pointer_to_call0a) (c, s, i, l);
2801 }
2802
2803 Instead of calling a stub library function _call_via_xx (xx is
2804 the register name), GCC may inline the trampoline in the object
2805 file as below (register r2 has the address of call0a).
2806
2807 .global main
2808 .type main, %function
2809 ...
2810 bl .L1
2811 ...
2812 .size main, .-main
2813
2814 .L1:
2815 bx r2
2816
2817 The trampoline 'bx r2' doesn't belong to main. */
2818
2819 static CORE_ADDR
2820 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2821 {
2822 /* The heuristics of recognizing such trampoline is that FRAME is
2823 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2824 if (arm_frame_is_thumb (frame))
2825 {
2826 gdb_byte buf[2];
2827
2828 if (target_read_memory (pc, buf, 2) == 0)
2829 {
2830 struct gdbarch *gdbarch = get_frame_arch (frame);
2831 enum bfd_endian byte_order_for_code
2832 = gdbarch_byte_order_for_code (gdbarch);
2833 uint16_t insn
2834 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2835
2836 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2837 {
2838 CORE_ADDR dest
2839 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2840
2841 /* Clear the LSB so that gdb core sets step-resume
2842 breakpoint at the right address. */
2843 return UNMAKE_THUMB_ADDR (dest);
2844 }
2845 }
2846 }
2847
2848 return 0;
2849 }
2850
2851 static struct arm_prologue_cache *
2852 arm_make_stub_cache (struct frame_info *this_frame)
2853 {
2854 struct arm_prologue_cache *cache;
2855
2856 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2857 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2858
2859 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2860
2861 return cache;
2862 }
2863
2864 /* Our frame ID for a stub frame is the current SP and LR. */
2865
2866 static void
2867 arm_stub_this_id (struct frame_info *this_frame,
2868 void **this_cache,
2869 struct frame_id *this_id)
2870 {
2871 struct arm_prologue_cache *cache;
2872
2873 if (*this_cache == NULL)
2874 *this_cache = arm_make_stub_cache (this_frame);
2875 cache = (struct arm_prologue_cache *) *this_cache;
2876
2877 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2878 }
2879
2880 static int
2881 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2882 struct frame_info *this_frame,
2883 void **this_prologue_cache)
2884 {
2885 CORE_ADDR addr_in_block;
2886 gdb_byte dummy[4];
2887 CORE_ADDR pc, start_addr;
2888 const char *name;
2889
2890 addr_in_block = get_frame_address_in_block (this_frame);
2891 pc = get_frame_pc (this_frame);
2892 if (in_plt_section (addr_in_block)
2893 /* We also use the stub winder if the target memory is unreadable
2894 to avoid having the prologue unwinder trying to read it. */
2895 || target_read_memory (pc, dummy, 4) != 0)
2896 return 1;
2897
2898 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2899 && arm_skip_bx_reg (this_frame, pc) != 0)
2900 return 1;
2901
2902 return 0;
2903 }
2904
/* Unwinder for stub frames (PLT entries, unreadable code, inline
   'bx <Rm>' trampolines); see arm_stub_unwind_sniffer.  */

struct frame_unwind arm_stub_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_stub_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_stub_unwind_sniffer
};
2913
2914 /* Put here the code to store, into CACHE->saved_regs, the addresses
2915 of the saved registers of frame described by THIS_FRAME. CACHE is
2916 returned. */
2917
2918 static struct arm_prologue_cache *
2919 arm_m_exception_cache (struct frame_info *this_frame)
2920 {
2921 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2922 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2923 struct arm_prologue_cache *cache;
2924 CORE_ADDR unwound_sp;
2925 LONGEST xpsr;
2926
2927 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2928 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2929
2930 unwound_sp = get_frame_register_unsigned (this_frame,
2931 ARM_SP_REGNUM);
2932
2933 /* The hardware saves eight 32-bit words, comprising xPSR,
2934 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2935 "B1.5.6 Exception entry behavior" in
2936 "ARMv7-M Architecture Reference Manual". */
2937 cache->saved_regs[0].addr = unwound_sp;
2938 cache->saved_regs[1].addr = unwound_sp + 4;
2939 cache->saved_regs[2].addr = unwound_sp + 8;
2940 cache->saved_regs[3].addr = unwound_sp + 12;
2941 cache->saved_regs[12].addr = unwound_sp + 16;
2942 cache->saved_regs[14].addr = unwound_sp + 20;
2943 cache->saved_regs[15].addr = unwound_sp + 24;
2944 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2945
2946 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2947 aligner between the top of the 32-byte stack frame and the
2948 previous context's stack pointer. */
2949 cache->prev_sp = unwound_sp + 32;
2950 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2951 && (xpsr & (1 << 9)) != 0)
2952 cache->prev_sp += 4;
2953
2954 return cache;
2955 }
2956
2957 /* Implementation of function hook 'this_id' in
2958 'struct frame_uwnind'. */
2959
2960 static void
2961 arm_m_exception_this_id (struct frame_info *this_frame,
2962 void **this_cache,
2963 struct frame_id *this_id)
2964 {
2965 struct arm_prologue_cache *cache;
2966
2967 if (*this_cache == NULL)
2968 *this_cache = arm_m_exception_cache (this_frame);
2969 cache = (struct arm_prologue_cache *) *this_cache;
2970
2971 /* Our frame ID for a stub frame is the current SP and LR. */
2972 *this_id = frame_id_build (cache->prev_sp,
2973 get_frame_pc (this_frame));
2974 }
2975
2976 /* Implementation of function hook 'prev_register' in
2977 'struct frame_uwnind'. */
2978
2979 static struct value *
2980 arm_m_exception_prev_register (struct frame_info *this_frame,
2981 void **this_cache,
2982 int prev_regnum)
2983 {
2984 struct arm_prologue_cache *cache;
2985
2986 if (*this_cache == NULL)
2987 *this_cache = arm_m_exception_cache (this_frame);
2988 cache = (struct arm_prologue_cache *) *this_cache;
2989
2990 /* The value was already reconstructed into PREV_SP. */
2991 if (prev_regnum == ARM_SP_REGNUM)
2992 return frame_unwind_got_constant (this_frame, prev_regnum,
2993 cache->prev_sp);
2994
2995 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2996 prev_regnum);
2997 }
2998
2999 /* Implementation of function hook 'sniffer' in
3000 'struct frame_uwnind'. */
3001
3002 static int
3003 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3004 struct frame_info *this_frame,
3005 void **this_prologue_cache)
3006 {
3007 CORE_ADDR this_pc = get_frame_pc (this_frame);
3008
3009 /* No need to check is_m; this sniffer is only registered for
3010 M-profile architectures. */
3011
3012 /* Check if exception frame returns to a magic PC value. */
3013 return arm_m_addr_is_magic (this_pc);
3014 }
3015
/* Frame unwinder for M-profile exceptions.  Marked SIGTRAMP_FRAME
   since the hardware-pushed exception frame plays the role of a
   signal trampoline.  */

struct frame_unwind arm_m_exception_unwind =
{
  SIGTRAMP_FRAME,
  default_frame_unwind_stop_reason,
  arm_m_exception_this_id,
  arm_m_exception_prev_register,
  NULL,
  arm_m_exception_unwind_sniffer
};
3027
3028 static CORE_ADDR
3029 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3030 {
3031 struct arm_prologue_cache *cache;
3032
3033 if (*this_cache == NULL)
3034 *this_cache = arm_make_prologue_cache (this_frame);
3035 cache = (struct arm_prologue_cache *) *this_cache;
3036
3037 return cache->prev_sp - cache->framesize;
3038 }
3039
/* Frame base handler for prologue-unwound frames; the same address is
   used for the frame base, locals and arguments.  */

struct frame_base arm_normal_base = {
  &arm_prologue_unwind,
  arm_normal_frame_base,
  arm_normal_frame_base,
  arm_normal_frame_base
};
3046
/* DWARF2_FRAME_REG_FN handler for the registers the DWARF unwinder
   cannot produce directly: the PC (the saved LR may carry the Thumb
   bit, which is not part of the PC) and the CPSR (its T bit must be
   made consistent with the saved LR).  Called only for the registers
   registered in arm_dwarf2_frame_init_reg; any other REGNUM is an
   internal error.  */

static struct value *
arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
                          int regnum)
{
  struct gdbarch * gdbarch = get_frame_arch (this_frame);
  CORE_ADDR lr, cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

  switch (regnum)
    {
    case ARM_PC_REGNUM:
      /* The PC is normally copied from the return column, which
         describes saves of LR.  However, that version may have an
         extra bit set to indicate Thumb state.  The bit is not
         part of the PC.  */
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, regnum,
                                        arm_addr_bits_remove (gdbarch, lr));

    case ARM_PS_REGNUM:
      /* Reconstruct the T bit; see arm_prologue_prev_register for details.  */
      cpsr = get_frame_register_unsigned (this_frame, regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
        cpsr |= t_bit;
      else
        cpsr &= ~t_bit;
      return frame_unwind_got_constant (this_frame, regnum, cpsr);

    default:
      internal_error (__FILE__, __LINE__,
                      _("Unexpected register %d"), regnum);
    }
}
3081
3082 static void
3083 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3084 struct dwarf2_frame_state_reg *reg,
3085 struct frame_info *this_frame)
3086 {
3087 switch (regnum)
3088 {
3089 case ARM_PC_REGNUM:
3090 case ARM_PS_REGNUM:
3091 reg->how = DWARF2_FRAME_REG_FN;
3092 reg->loc.fn = arm_dwarf2_prev_register;
3093 break;
3094 case ARM_SP_REGNUM:
3095 reg->how = DWARF2_FRAME_REG_CFA;
3096 break;
3097 }
3098 }
3099
/* Implement the stack_frame_destroyed_p gdbarch method for Thumb
   code: return non-zero if PC appears to be inside the function
   epilogue, i.e. after the stack frame has been torn down.  */

static int
thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn, insn2;
  int found_return = 0, found_stack_adjust = 0;
  CORE_ADDR func_start, func_end;
  CORE_ADDR scan_pc;
  gdb_byte buf[4];

  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* The epilogue is a sequence of instructions along the following lines:

    - add stack frame size to SP or FP
    - [if frame pointer used] restore SP from FP
    - restore registers from SP [may include PC]
    - a return-type instruction [if PC wasn't already restored]

    In a first pass, we scan forward from the current PC and verify the
    instructions we find as compatible with this sequence, ending in a
    return instruction.

    However, this is not sufficient to distinguish indirect function calls
    within a function from indirect tail calls in the epilogue in some cases.
    Therefore, if we didn't already find any SP-changing instruction during
    forward scan, we add a backward scanning heuristic to ensure we actually
    are in the epilogue.  */

  /* Forward pass: everything from PC to the return must look like
     epilogue instructions.  */
  scan_pc = pc;
  while (scan_pc < func_end && !found_return)
    {
      if (target_read_memory (scan_pc, buf, 2))
	break;

      scan_pc += 2;
      insn = extract_unsigned_integer (buf, 2, byte_order_for_code);

      if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
	found_return = 1;
      else if (insn == 0x46f7)  /* mov pc, lr */
	found_return = 1;
      else if (thumb_instruction_restores_sp (insn))
	{
	  if ((insn & 0xff00) == 0xbd00)  /* pop <registers, PC> */
	    found_return = 1;
	}
      else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
	{
	  /* Fetch the second halfword of the 32-bit encoding.  */
	  if (target_read_memory (scan_pc, buf, 2))
	    break;

	  scan_pc += 2;
	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);

	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
	    {
	      if (insn2 & 0x8000)  /* <registers> include PC.  */
		found_return = 1;
	    }
	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
		   && (insn2 & 0x0fff) == 0x0b04)
	    {
	      if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC.  */
		found_return = 1;
	    }
	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
		   && (insn2 & 0x0e00) == 0x0a00)
	    ;
	  else
	    /* Not an epilogue-compatible 32-bit instruction.  */
	    break;
	}
      else
	/* Not an epilogue-compatible 16-bit instruction.  */
	break;
    }

  if (!found_return)
    return 0;

  /* Since any instruction in the epilogue sequence, with the possible
     exception of return itself, updates the stack pointer, we need to
     scan backwards for at most one instruction.  Try either a 16-bit or
     a 32-bit instruction.  This is just a heuristic, so we do not worry
     too much about false positives.  */

  if (pc - 4 < func_start)
    return 0;
  if (target_read_memory (pc - 4, buf, 4))
    return 0;

  /* INSN is the halfword four bytes back (first half of a possible
     32-bit instruction); INSN2 is the halfword immediately before PC
     (a possible 16-bit instruction, or the second half).  */
  insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
  insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);

  if (thumb_instruction_restores_sp (insn2))
    found_stack_adjust = 1;
  else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
    found_stack_adjust = 1;
  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
	   && (insn2 & 0x0fff) == 0x0b04)
    found_stack_adjust = 1;
  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
	   && (insn2 & 0x0e00) == 0x0a00)
    found_stack_adjust = 1;

  return found_stack_adjust;
}
3209
/* Helper for arm_stack_frame_destroyed_p: the ARM-mode (32-bit
   instruction) variant of the epilogue check.  Return non-zero if PC
   looks like it is inside a function epilogue.  */

static int
arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn;
  int found_return;
  CORE_ADDR func_start, func_end;

  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* We are in the epilogue if the previous instruction was a stack
     adjustment and the next instruction is a possible return (bx, mov
     pc, or pop).  We could have to scan backwards to find the stack
     adjustment, or forwards to find the return, but this is a decent
     approximation.  First scan forwards.  */

  found_return = 0;
  insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
  /* Skip the unconditional (NV) encoding space.  */
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0ffffff0) == 0x012fff10)
	/* BX.  */
	found_return = 1;
      else if ((insn & 0x0ffffff0) == 0x01a0f000)
	/* MOV PC.  */
	found_return = 1;
      else if ((insn & 0x0fff0000) == 0x08bd0000
	       && (insn & 0x0000c000) != 0)
	/* POP (LDMIA), including PC or LR.  */
	found_return = 1;
    }

  if (!found_return)
    return 0;

  /* Scan backwards.  This is just a heuristic, so do not worry about
     false positives from mode changes.  */

  if (pc < func_start + 4)
    return 0;

  insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
  if (arm_instruction_restores_sp (insn))
    return 1;

  return 0;
}
3258
3259 /* Implement the stack_frame_destroyed_p gdbarch method. */
3260
3261 static int
3262 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3263 {
3264 if (arm_pc_is_thumb (gdbarch, pc))
3265 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3266 else
3267 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3268 }
3269
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.  */

struct stack_item
{
  int len;			/* Number of bytes in DATA.  */
  struct stack_item *prev;	/* Item below this one, or NULL.  */
  gdb_byte *data;		/* Heap-allocated copy of the contents.  */
};
3279
3280 static struct stack_item *
3281 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3282 {
3283 struct stack_item *si;
3284 si = XNEW (struct stack_item);
3285 si->data = (gdb_byte *) xmalloc (len);
3286 si->len = len;
3287 si->prev = prev;
3288 memcpy (si->data, contents, len);
3289 return si;
3290 }
3291
3292 static struct stack_item *
3293 pop_stack_item (struct stack_item *si)
3294 {
3295 struct stack_item *dead = si;
3296 si = si->prev;
3297 xfree (dead->data);
3298 xfree (dead);
3299 return si;
3300 }
3301
3302 /* Implement the gdbarch type alignment method, overrides the generic
3303 alignment algorithm for anything that is arm specific. */
3304
3305 static ULONGEST
3306 arm_type_align (gdbarch *gdbarch, struct type *t)
3307 {
3308 t = check_typedef (t);
3309 if (TYPE_CODE (t) == TYPE_CODE_ARRAY && TYPE_VECTOR (t))
3310 {
3311 /* Use the natural alignment for vector types (the same for
3312 scalar type), but the maximum alignment is 64-bit. */
3313 if (TYPE_LENGTH (t) > 8)
3314 return 8;
3315 else
3316 return TYPE_LENGTH (t);
3317 }
3318
3319 /* Allow the common code to calculate the alignment. */
3320 return 0;
3321 }
3322
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,	/* Not yet classified.  */
  VFP_CPRC_SINGLE,	/* 32-bit single-precision float.  */
  VFP_CPRC_DOUBLE,	/* 64-bit double-precision float.  */
  VFP_CPRC_VEC64,	/* 64-bit containerized vector.  */
  VFP_CPRC_VEC128	/* 128-bit containerized vector.  */
};
3334
3335 /* The length of one element of base type B. */
3336
3337 static unsigned
3338 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3339 {
3340 switch (b)
3341 {
3342 case VFP_CPRC_SINGLE:
3343 return 4;
3344 case VFP_CPRC_DOUBLE:
3345 return 8;
3346 case VFP_CPRC_VEC64:
3347 return 8;
3348 case VFP_CPRC_VEC128:
3349 return 16;
3350 default:
3351 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3352 (int) b);
3353 }
3354 }
3355
3356 /* The character ('s', 'd' or 'q') for the type of VFP register used
3357 for passing base type B. */
3358
3359 static int
3360 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3361 {
3362 switch (b)
3363 {
3364 case VFP_CPRC_SINGLE:
3365 return 's';
3366 case VFP_CPRC_DOUBLE:
3367 return 'd';
3368 case VFP_CPRC_VEC64:
3369 return 'd';
3370 case VFP_CPRC_VEC128:
3371 return 'q';
3372 default:
3373 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3374 (int) b);
3375 }
3376 }
3377
/* Determine whether T may be part of a candidate for passing and
   returning in VFP registers, ignoring the limit on the total number
   of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
   classification of the first valid component found; if it is not
   VFP_CPRC_UNKNOWN, all components must have the same classification
   as *BASE_TYPE.  If it is found that T contains a type not permitted
   for passing and returning in VFP registers, a type differently
   classified from *BASE_TYPE, or two types differently classified
   from each other, return -1, otherwise return the total number of
   base-type elements found (possibly 0 in an empty structure or
   array).  Vector types are not currently supported, matching the
   generic AAPCS support.  */

static int
arm_vfp_cprc_sub_candidate (struct type *t,
			    enum arm_vfp_cprc_base_type *base_type)
{
  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    case TYPE_CODE_FLT:
      /* A scalar float is one SINGLE or DOUBLE element, depending on
	 its size; other float sizes are not CPRCs.  */
      switch (TYPE_LENGTH (t))
	{
	case 4:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 1;

	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 1;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_COMPLEX:
      /* Arguments of complex T where T is one of the types float or
	 double get treated as if they are implemented as:

	 struct complexT
	 {
	   T real;
	   T imag;
	 };

	 */
      switch (TYPE_LENGTH (t))
	{
	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 2;

	case 16:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 2;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_ARRAY:
      {
	if (TYPE_VECTOR (t))
	  {
	    /* A 64-bit or 128-bit containerized vector type are VFP
	       CPRCs.  */
	    switch (TYPE_LENGTH (t))
	      {
	      case 8:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC64;
		return 1;
	      case 16:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC128;
		return 1;
	      default:
		return -1;
	      }
	  }
	else
	  {
	    /* An ordinary array: classify the element type, then
	       derive the element count from the total length.  */
	    int count;
	    unsigned unitlen;

	    count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
						base_type);
	    if (count == -1)
	      return -1;
	    if (TYPE_LENGTH (t) == 0)
	      {
		gdb_assert (count == 0);
		return 0;
	      }
	    else if (count == 0)
	      return -1;
	    unitlen = arm_vfp_cprc_unit_length (*base_type);
	    gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
	    return TYPE_LENGTH (t) / unitlen;
	  }
      }
      break;

    case TYPE_CODE_STRUCT:
      {
	/* A struct is a CPRC candidate if all of its non-static
	   fields are; the total count is the sum over the fields, and
	   the struct may not contain padding beyond the elements.  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = 0;

	    if (!field_is_static (&TYPE_FIELD (t, i)))
	      sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
						      base_type);
	    if (sub_count == -1)
	      return -1;
	    count += sub_count;
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    case TYPE_CODE_UNION:
      {
	/* For a union the element count is the maximum over the
	   members, since the members overlap.  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count = (count > sub_count ? count : sub_count);
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    default:
      break;
    }

  return -1;
}
3556
3557 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3558 if passed to or returned from a non-variadic function with the VFP
3559 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3560 *BASE_TYPE to the base type for T and *COUNT to the number of
3561 elements of that base type before returning. */
3562
3563 static int
3564 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3565 int *count)
3566 {
3567 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3568 int c = arm_vfp_cprc_sub_candidate (t, &b);
3569 if (c <= 0 || c > 4)
3570 return 0;
3571 *base_type = b;
3572 *count = c;
3573 return 1;
3574 }
3575
3576 /* Return 1 if the VFP ABI should be used for passing arguments to and
3577 returning values from a function of type FUNC_TYPE, 0
3578 otherwise. */
3579
3580 static int
3581 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3582 {
3583 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3584 /* Variadic functions always use the base ABI. Assume that functions
3585 without debug info are not variadic. */
3586 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3587 return 0;
3588 /* The VFP ABI is only supported as a variant of AAPCS. */
3589 if (tdep->arm_abi != ARM_ABI_AAPCS)
3590 return 0;
3591 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3592 }
3593
3594 /* We currently only support passing parameters in integer registers, which
3595 conforms with GCC's default model, and VFP argument passing following
3596 the VFP variant of AAPCS. Several other variants exist and
3597 we should probably support some of them based on the selected ABI. */
3598
3599 static CORE_ADDR
3600 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3601 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3602 struct value **args, CORE_ADDR sp,
3603 function_call_return_method return_method,
3604 CORE_ADDR struct_addr)
3605 {
3606 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3607 int argnum;
3608 int argreg;
3609 int nstack;
3610 struct stack_item *si = NULL;
3611 int use_vfp_abi;
3612 struct type *ftype;
3613 unsigned vfp_regs_free = (1 << 16) - 1;
3614
3615 /* Determine the type of this function and whether the VFP ABI
3616 applies. */
3617 ftype = check_typedef (value_type (function));
3618 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3619 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3620 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3621
3622 /* Set the return address. For the ARM, the return breakpoint is
3623 always at BP_ADDR. */
3624 if (arm_pc_is_thumb (gdbarch, bp_addr))
3625 bp_addr |= 1;
3626 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3627
3628 /* Walk through the list of args and determine how large a temporary
3629 stack is required. Need to take care here as structs may be
3630 passed on the stack, and we have to push them. */
3631 nstack = 0;
3632
3633 argreg = ARM_A1_REGNUM;
3634 nstack = 0;
3635
3636 /* The struct_return pointer occupies the first parameter
3637 passing register. */
3638 if (return_method == return_method_struct)
3639 {
3640 if (arm_debug)
3641 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3642 gdbarch_register_name (gdbarch, argreg),
3643 paddress (gdbarch, struct_addr));
3644 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3645 argreg++;
3646 }
3647
3648 for (argnum = 0; argnum < nargs; argnum++)
3649 {
3650 int len;
3651 struct type *arg_type;
3652 struct type *target_type;
3653 enum type_code typecode;
3654 const bfd_byte *val;
3655 int align;
3656 enum arm_vfp_cprc_base_type vfp_base_type;
3657 int vfp_base_count;
3658 int may_use_core_reg = 1;
3659
3660 arg_type = check_typedef (value_type (args[argnum]));
3661 len = TYPE_LENGTH (arg_type);
3662 target_type = TYPE_TARGET_TYPE (arg_type);
3663 typecode = TYPE_CODE (arg_type);
3664 val = value_contents (args[argnum]);
3665
3666 align = type_align (arg_type);
3667 /* Round alignment up to a whole number of words. */
3668 align = (align + ARM_INT_REGISTER_SIZE - 1)
3669 & ~(ARM_INT_REGISTER_SIZE - 1);
3670 /* Different ABIs have different maximum alignments. */
3671 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3672 {
3673 /* The APCS ABI only requires word alignment. */
3674 align = ARM_INT_REGISTER_SIZE;
3675 }
3676 else
3677 {
3678 /* The AAPCS requires at most doubleword alignment. */
3679 if (align > ARM_INT_REGISTER_SIZE * 2)
3680 align = ARM_INT_REGISTER_SIZE * 2;
3681 }
3682
3683 if (use_vfp_abi
3684 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3685 &vfp_base_count))
3686 {
3687 int regno;
3688 int unit_length;
3689 int shift;
3690 unsigned mask;
3691
3692 /* Because this is a CPRC it cannot go in a core register or
3693 cause a core register to be skipped for alignment.
3694 Either it goes in VFP registers and the rest of this loop
3695 iteration is skipped for this argument, or it goes on the
3696 stack (and the stack alignment code is correct for this
3697 case). */
3698 may_use_core_reg = 0;
3699
3700 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3701 shift = unit_length / 4;
3702 mask = (1 << (shift * vfp_base_count)) - 1;
3703 for (regno = 0; regno < 16; regno += shift)
3704 if (((vfp_regs_free >> regno) & mask) == mask)
3705 break;
3706
3707 if (regno < 16)
3708 {
3709 int reg_char;
3710 int reg_scaled;
3711 int i;
3712
3713 vfp_regs_free &= ~(mask << regno);
3714 reg_scaled = regno / shift;
3715 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3716 for (i = 0; i < vfp_base_count; i++)
3717 {
3718 char name_buf[4];
3719 int regnum;
3720 if (reg_char == 'q')
3721 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3722 val + i * unit_length);
3723 else
3724 {
3725 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3726 reg_char, reg_scaled + i);
3727 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3728 strlen (name_buf));
3729 regcache->cooked_write (regnum, val + i * unit_length);
3730 }
3731 }
3732 continue;
3733 }
3734 else
3735 {
3736 /* This CPRC could not go in VFP registers, so all VFP
3737 registers are now marked as used. */
3738 vfp_regs_free = 0;
3739 }
3740 }
3741
3742 /* Push stack padding for doubleword alignment. */
3743 if (nstack & (align - 1))
3744 {
3745 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3746 nstack += ARM_INT_REGISTER_SIZE;
3747 }
3748
3749 /* Doubleword aligned quantities must go in even register pairs. */
3750 if (may_use_core_reg
3751 && argreg <= ARM_LAST_ARG_REGNUM
3752 && align > ARM_INT_REGISTER_SIZE
3753 && argreg & 1)
3754 argreg++;
3755
3756 /* If the argument is a pointer to a function, and it is a
3757 Thumb function, create a LOCAL copy of the value and set
3758 the THUMB bit in it. */
3759 if (TYPE_CODE_PTR == typecode
3760 && target_type != NULL
3761 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3762 {
3763 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3764 if (arm_pc_is_thumb (gdbarch, regval))
3765 {
3766 bfd_byte *copy = (bfd_byte *) alloca (len);
3767 store_unsigned_integer (copy, len, byte_order,
3768 MAKE_THUMB_ADDR (regval));
3769 val = copy;
3770 }
3771 }
3772
3773 /* Copy the argument to general registers or the stack in
3774 register-sized pieces. Large arguments are split between
3775 registers and stack. */
3776 while (len > 0)
3777 {
3778 int partial_len = len < ARM_INT_REGISTER_SIZE
3779 ? len : ARM_INT_REGISTER_SIZE;
3780 CORE_ADDR regval
3781 = extract_unsigned_integer (val, partial_len, byte_order);
3782
3783 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3784 {
3785 /* The argument is being passed in a general purpose
3786 register. */
3787 if (byte_order == BFD_ENDIAN_BIG)
3788 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
3789 if (arm_debug)
3790 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3791 argnum,
3792 gdbarch_register_name
3793 (gdbarch, argreg),
3794 phex (regval, ARM_INT_REGISTER_SIZE));
3795 regcache_cooked_write_unsigned (regcache, argreg, regval);
3796 argreg++;
3797 }
3798 else
3799 {
3800 gdb_byte buf[ARM_INT_REGISTER_SIZE];
3801
3802 memset (buf, 0, sizeof (buf));
3803 store_unsigned_integer (buf, partial_len, byte_order, regval);
3804
3805 /* Push the arguments onto the stack. */
3806 if (arm_debug)
3807 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3808 argnum, nstack);
3809 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3810 nstack += ARM_INT_REGISTER_SIZE;
3811 }
3812
3813 len -= partial_len;
3814 val += partial_len;
3815 }
3816 }
3817 /* If we have an odd number of words to push, then decrement the stack
3818 by one word now, so first stack argument will be dword aligned. */
3819 if (nstack & 4)
3820 sp -= 4;
3821
3822 while (si)
3823 {
3824 sp -= si->len;
3825 write_memory (sp, si->data, si->len);
3826 si = pop_stack_item (si);
3827 }
3828
3829 /* Finally, update teh SP register. */
3830 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3831
3832 return sp;
3833 }
3834
3835
3836 /* Always align the frame to an 8-byte boundary. This is required on
3837 some platforms and harmless on the rest. */
3838
3839 static CORE_ADDR
3840 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3841 {
3842 /* Align the stack to eight bytes. */
3843 return sp & ~ (CORE_ADDR) 7;
3844 }
3845
3846 static void
3847 print_fpu_flags (struct ui_file *file, int flags)
3848 {
3849 if (flags & (1 << 0))
3850 fputs_filtered ("IVO ", file);
3851 if (flags & (1 << 1))
3852 fputs_filtered ("DVZ ", file);
3853 if (flags & (1 << 2))
3854 fputs_filtered ("OFL ", file);
3855 if (flags & (1 << 3))
3856 fputs_filtered ("UFL ", file);
3857 if (flags & (1 << 4))
3858 fputs_filtered ("INX ", file);
3859 fputc_filtered ('\n', file);
3860 }
3861
3862 /* Print interesting information about the floating point processor
3863 (if present) or emulator. */
3864 static void
3865 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3866 struct frame_info *frame, const char *args)
3867 {
3868 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3869 int type;
3870
3871 type = (status >> 24) & 127;
3872 if (status & (1 << 31))
3873 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3874 else
3875 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3876 /* i18n: [floating point unit] mask */
3877 fputs_filtered (_("mask: "), file);
3878 print_fpu_flags (file, status >> 16);
3879 /* i18n: [floating point unit] flags */
3880 fputs_filtered (_("flags: "), file);
3881 print_fpu_flags (file, status);
3882 }
3883
3884 /* Construct the ARM extended floating point type. */
3885 static struct type *
3886 arm_ext_type (struct gdbarch *gdbarch)
3887 {
3888 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3889
3890 if (!tdep->arm_ext_type)
3891 tdep->arm_ext_type
3892 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3893 floatformats_arm_ext);
3894
3895 return tdep->arm_ext_type;
3896 }
3897
/* Return the GDB type for a NEON 'd' (64-bit) register: a union of
   all the per-lane views (u8/u16/u32/u64/f32/f64).  Built lazily and
   cached in the per-arch TDEP.  */

static struct type *
arm_neon_double_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_double_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
			       TYPE_CODE_UNION);
      /* Each field overlays the same 64 bits with a different element
	 size.  */
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", elem);
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", elem);

      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_d";
      tdep->neon_double_type = t;
    }

  return tdep->neon_double_type;
}
3929
/* FIXME: The vector types are not correctly ordered on big-endian
   targets.  Just as s0 is the low bits of d0, d0[0] is also the low
   bits of d0 - regardless of what unit size is being held in d0.  So
   the offset of the first uint8 in d0 is 7, but the offset of the
   first float is 4.  This code works as-is for little-endian
   targets.  */

/* Return the GDB type for a NEON 'q' (128-bit) register: a union of
   all the per-lane views.  Built lazily and cached in the per-arch
   TDEP; see arm_neon_double_type for the 64-bit counterpart.  */

static struct type *
arm_neon_quad_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_quad_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
			       TYPE_CODE_UNION);
      /* Each field overlays the same 128 bits with a different
	 element size.  */
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 16));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", init_vector_type (elem, 2));

      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_q";
      tdep->neon_quad_type = t;
    }

  return tdep->neon_quad_type;
}
3968
/* Return the GDB type object for the "standard" data type of data in
   register N.  */

static struct type *
arm_register_type (struct gdbarch *gdbarch, int regnum)
{
  int num_regs = gdbarch_num_regs (gdbarch);

  /* Pseudo registers: 32 single-precision VFP views first, then 16
     NEON quad views.  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && regnum >= num_regs && regnum < num_regs + 32)
    return builtin_type (gdbarch)->builtin_float;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
    return arm_neon_quad_type (gdbarch);

  /* If the target description has register information, we are only
     in this function so that we can override the types of
     double-precision registers for NEON.  */
  if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
    {
      struct type *t = tdesc_register_type (gdbarch, regnum);

      if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
	  && TYPE_CODE (t) == TYPE_CODE_FLT
	  && gdbarch_tdep (gdbarch)->have_neon)
	return arm_neon_double_type (gdbarch);
      else
	return t;
    }

  /* No target description: fall back to fixed assignments.  */
  if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
    {
      if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
	return builtin_type (gdbarch)->builtin_void;

      return arm_ext_type (gdbarch);
    }
  else if (regnum == ARM_SP_REGNUM)
    return builtin_type (gdbarch)->builtin_data_ptr;
  else if (regnum == ARM_PC_REGNUM)
    return builtin_type (gdbarch)->builtin_func_ptr;
  else if (regnum >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return builtin_type (gdbarch)->builtin_int0;
  else
    return builtin_type (gdbarch)->builtin_uint32;
}
4018
/* Map a DWARF register REGNUM onto the appropriate GDB register
   number, following the DWARF register numbering for the ARM
   architecture.  Return -1 for DWARF numbers with no GDB
   equivalent.  */

static int
arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
{
  /* Core integer regs.  */
  if (reg >= 0 && reg <= 15)
    return reg;

  /* Legacy FPA encoding.  These were once used in a way which
     overlapped with VFP register numbering, so their use is
     discouraged, but GDB doesn't support the ARM toolchain
     which used them for VFP.  */
  if (reg >= 16 && reg <= 23)
    return ARM_F0_REGNUM + reg - 16;

  /* New assignments for the FPA registers.  */
  if (reg >= 96 && reg <= 103)
    return ARM_F0_REGNUM + reg - 96;

  /* WMMX register assignments.  */
  if (reg >= 104 && reg <= 111)
    return ARM_WCGR0_REGNUM + reg - 104;

  if (reg >= 112 && reg <= 127)
    return ARM_WR0_REGNUM + reg - 112;

  if (reg >= 192 && reg <= 199)
    return ARM_WC0_REGNUM + reg - 192;

  /* VFP v2 registers.  A double precision value is actually
     in d1 rather than s2, but the ABI only defines numbering
     for the single precision registers.  This will "just work"
     in GDB for little endian targets (we'll read eight bytes,
     starting in s0 and then progressing to s1), but will be
     reversed on big endian targets with VFP.  This won't
     be a problem for the new Neon quad registers; you're supposed
     to use DW_OP_piece for those.  */
  if (reg >= 64 && reg <= 95)
    {
      char name_buf[4];

      /* Resolve "sN" through the user-register name table.  */
      xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
      return user_reg_map_name_to_regnum (gdbarch, name_buf,
					  strlen (name_buf));
    }

  /* VFP v3 / Neon registers.  This range is also used for VFP v2
     registers, except that it now describes d0 instead of s0.  */
  if (reg >= 256 && reg <= 287)
    {
      char name_buf[4];

      /* Resolve "dN" through the user-register name table.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
      return user_reg_map_name_to_regnum (gdbarch, name_buf,
					  strlen (name_buf));
    }

  return -1;
}
4080
/* Map GDB internal REGNUM onto the Arm simulator register numbers.
   REGNUM must be a valid GDB register number; an unmappable value is
   an internal error.  */
static int
arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
{
  int reg = regnum;
  gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));

  /* iWMMXt data, control, and scalar-group registers map to the
     simulator's coprocessor register banks.  */
  if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
    return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;

  if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
    return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;

  if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
    return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;

  /* Core registers, then FPA registers, then status registers, in
     that order.  */
  if (reg < NUM_GREGS)
    return SIM_ARM_R0_REGNUM + reg;
  reg -= NUM_GREGS;

  if (reg < NUM_FREGS)
    return SIM_ARM_FP0_REGNUM + reg;
  reg -= NUM_FREGS;

  if (reg < NUM_SREGS)
    return SIM_ARM_FPS_REGNUM + reg;
  reg -= NUM_SREGS;

  internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
}
4111
4112 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4113 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4114 NULL if an error occurs. BUF is freed. */
4115
4116 static gdb_byte *
4117 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4118 int old_len, int new_len)
4119 {
4120 gdb_byte *new_buf;
4121 int bytes_to_read = new_len - old_len;
4122
4123 new_buf = (gdb_byte *) xmalloc (new_len);
4124 memcpy (new_buf + bytes_to_read, buf, old_len);
4125 xfree (buf);
4126 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4127 {
4128 xfree (new_buf);
4129 return NULL;
4130 }
4131 return new_buf;
4132 }
4133
/* An IT block is at most the 2-byte IT instruction followed by
   four 4-byte instructions.  The furthest back we must search to
   find an IT block that affects the current instruction is thus
   2 + 3 * 4 == 14 bytes.  */
#define MAX_IT_BLOCK_PREFIX 14

/* Use a quick scan if there are more than this many bytes of
   code.  Below this threshold we simply decode forward from the
   nearest known instruction boundary instead.  */
#define IT_SCAN_THRESHOLD 32
4143
4144 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4145 A breakpoint in an IT block may not be hit, depending on the
4146 condition flags. */
4147 static CORE_ADDR
4148 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4149 {
4150 gdb_byte *buf;
4151 char map_type;
4152 CORE_ADDR boundary, func_start;
4153 int buf_len;
4154 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4155 int i, any, last_it, last_it_count;
4156
4157 /* If we are using BKPT breakpoints, none of this is necessary. */
4158 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4159 return bpaddr;
4160
4161 /* ARM mode does not have this problem. */
4162 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4163 return bpaddr;
4164
4165 /* We are setting a breakpoint in Thumb code that could potentially
4166 contain an IT block. The first step is to find how much Thumb
4167 code there is; we do not need to read outside of known Thumb
4168 sequences. */
4169 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4170 if (map_type == 0)
4171 /* Thumb-2 code must have mapping symbols to have a chance. */
4172 return bpaddr;
4173
4174 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4175
4176 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4177 && func_start > boundary)
4178 boundary = func_start;
4179
4180 /* Search for a candidate IT instruction. We have to do some fancy
4181 footwork to distinguish a real IT instruction from the second
4182 half of a 32-bit instruction, but there is no need for that if
4183 there's no candidate. */
4184 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4185 if (buf_len == 0)
4186 /* No room for an IT instruction. */
4187 return bpaddr;
4188
4189 buf = (gdb_byte *) xmalloc (buf_len);
4190 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4191 return bpaddr;
4192 any = 0;
4193 for (i = 0; i < buf_len; i += 2)
4194 {
4195 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4196 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4197 {
4198 any = 1;
4199 break;
4200 }
4201 }
4202
4203 if (any == 0)
4204 {
4205 xfree (buf);
4206 return bpaddr;
4207 }
4208
4209 /* OK, the code bytes before this instruction contain at least one
4210 halfword which resembles an IT instruction. We know that it's
4211 Thumb code, but there are still two possibilities. Either the
4212 halfword really is an IT instruction, or it is the second half of
4213 a 32-bit Thumb instruction. The only way we can tell is to
4214 scan forwards from a known instruction boundary. */
4215 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4216 {
4217 int definite;
4218
4219 /* There's a lot of code before this instruction. Start with an
4220 optimistic search; it's easy to recognize halfwords that can
4221 not be the start of a 32-bit instruction, and use that to
4222 lock on to the instruction boundaries. */
4223 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4224 if (buf == NULL)
4225 return bpaddr;
4226 buf_len = IT_SCAN_THRESHOLD;
4227
4228 definite = 0;
4229 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4230 {
4231 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4232 if (thumb_insn_size (inst1) == 2)
4233 {
4234 definite = 1;
4235 break;
4236 }
4237 }
4238
4239 /* At this point, if DEFINITE, BUF[I] is the first place we
4240 are sure that we know the instruction boundaries, and it is far
4241 enough from BPADDR that we could not miss an IT instruction
4242 affecting BPADDR. If ! DEFINITE, give up - start from a
4243 known boundary. */
4244 if (! definite)
4245 {
4246 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4247 bpaddr - boundary);
4248 if (buf == NULL)
4249 return bpaddr;
4250 buf_len = bpaddr - boundary;
4251 i = 0;
4252 }
4253 }
4254 else
4255 {
4256 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4257 if (buf == NULL)
4258 return bpaddr;
4259 buf_len = bpaddr - boundary;
4260 i = 0;
4261 }
4262
4263 /* Scan forwards. Find the last IT instruction before BPADDR. */
4264 last_it = -1;
4265 last_it_count = 0;
4266 while (i < buf_len)
4267 {
4268 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4269 last_it_count--;
4270 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4271 {
4272 last_it = i;
4273 if (inst1 & 0x0001)
4274 last_it_count = 4;
4275 else if (inst1 & 0x0002)
4276 last_it_count = 3;
4277 else if (inst1 & 0x0004)
4278 last_it_count = 2;
4279 else
4280 last_it_count = 1;
4281 }
4282 i += thumb_insn_size (inst1);
4283 }
4284
4285 xfree (buf);
4286
4287 if (last_it == -1)
4288 /* There wasn't really an IT instruction after all. */
4289 return bpaddr;
4290
4291 if (last_it_count < 1)
4292 /* It was too far away. */
4293 return bpaddr;
4294
4295 /* This really is a trouble spot. Move the breakpoint to the IT
4296 instruction. */
4297 return bpaddr - buf_len + last_it;
4298 }
4299
4300 /* ARM displaced stepping support.
4301
4302 Generally ARM displaced stepping works as follows:
4303
4304 1. When an instruction is to be single-stepped, it is first decoded by
4305 arm_process_displaced_insn. Depending on the type of instruction, it is
4306 then copied to a scratch location, possibly in a modified form. The
4307 copy_* set of functions performs such modification, as necessary. A
4308 breakpoint is placed after the modified instruction in the scratch space
4309 to return control to GDB. Note in particular that instructions which
4310 modify the PC will no longer do so after modification.
4311
4312 2. The instruction is single-stepped, by setting the PC to the scratch
4313 location address, and resuming. Control returns to GDB when the
4314 breakpoint is hit.
4315
4316 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4317 function used for the current instruction. This function's job is to
4318 put the CPU/memory state back to what it would have been if the
4319 instruction had been executed unmodified in its original location. */
4320
/* NOP instruction (mov r0, r0).  */
#define ARM_NOP 0xe1a00000
/* Thumb NOP encoding (mov r0, r0).  */
#define THUMB_NOP 0x4600
4324
4325 /* Helper for register reads for displaced stepping. In particular, this
4326 returns the PC as it would be seen by the instruction at its original
4327 location. */
4328
4329 ULONGEST
4330 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4331 int regno)
4332 {
4333 ULONGEST ret;
4334 CORE_ADDR from = dsc->insn_addr;
4335
4336 if (regno == ARM_PC_REGNUM)
4337 {
4338 /* Compute pipeline offset:
4339 - When executing an ARM instruction, PC reads as the address of the
4340 current instruction plus 8.
4341 - When executing a Thumb instruction, PC reads as the address of the
4342 current instruction plus 4. */
4343
4344 if (!dsc->is_thumb)
4345 from += 8;
4346 else
4347 from += 4;
4348
4349 if (debug_displaced)
4350 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4351 (unsigned long) from);
4352 return (ULONGEST) from;
4353 }
4354 else
4355 {
4356 regcache_cooked_read_unsigned (regs, regno, &ret);
4357 if (debug_displaced)
4358 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4359 regno, (unsigned long) ret);
4360 return ret;
4361 }
4362 }
4363
4364 static int
4365 displaced_in_arm_mode (struct regcache *regs)
4366 {
4367 ULONGEST ps;
4368 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4369
4370 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4371
4372 return (ps & t_bit) == 0;
4373 }
4374
4375 /* Write to the PC as from a branch instruction. */
4376
4377 static void
4378 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4379 ULONGEST val)
4380 {
4381 if (!dsc->is_thumb)
4382 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4383 architecture versions < 6. */
4384 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4385 val & ~(ULONGEST) 0x3);
4386 else
4387 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4388 val & ~(ULONGEST) 0x1);
4389 }
4390
4391 /* Write to the PC as from a branch-exchange instruction. */
4392
4393 static void
4394 bx_write_pc (struct regcache *regs, ULONGEST val)
4395 {
4396 ULONGEST ps;
4397 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4398
4399 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4400
4401 if ((val & 1) == 1)
4402 {
4403 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4404 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4405 }
4406 else if ((val & 2) == 0)
4407 {
4408 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4409 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4410 }
4411 else
4412 {
4413 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4414 mode, align dest to 4 bytes). */
4415 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4416 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4417 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4418 }
4419 }
4420
4421 /* Write to the PC as if from a load instruction. */
4422
4423 static void
4424 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4425 ULONGEST val)
4426 {
4427 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4428 bx_write_pc (regs, val);
4429 else
4430 branch_write_pc (regs, dsc, val);
4431 }
4432
4433 /* Write to the PC as if from an ALU instruction. */
4434
4435 static void
4436 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4437 ULONGEST val)
4438 {
4439 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4440 bx_write_pc (regs, val);
4441 else
4442 branch_write_pc (regs, dsc, val);
4443 }
4444
4445 /* Helper for writing to registers for displaced stepping. Writing to the PC
4446 has a varying effects depending on the instruction which does the write:
4447 this is controlled by the WRITE_PC argument. */
4448
4449 void
4450 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4451 int regno, ULONGEST val, enum pc_write_style write_pc)
4452 {
4453 if (regno == ARM_PC_REGNUM)
4454 {
4455 if (debug_displaced)
4456 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4457 (unsigned long) val);
4458 switch (write_pc)
4459 {
4460 case BRANCH_WRITE_PC:
4461 branch_write_pc (regs, dsc, val);
4462 break;
4463
4464 case BX_WRITE_PC:
4465 bx_write_pc (regs, val);
4466 break;
4467
4468 case LOAD_WRITE_PC:
4469 load_write_pc (regs, dsc, val);
4470 break;
4471
4472 case ALU_WRITE_PC:
4473 alu_write_pc (regs, dsc, val);
4474 break;
4475
4476 case CANNOT_WRITE_PC:
4477 warning (_("Instruction wrote to PC in an unexpected way when "
4478 "single-stepping"));
4479 break;
4480
4481 default:
4482 internal_error (__FILE__, __LINE__,
4483 _("Invalid argument to displaced_write_reg"));
4484 }
4485
4486 dsc->wrote_to_pc = 1;
4487 }
4488 else
4489 {
4490 if (debug_displaced)
4491 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4492 regno, (unsigned long) val);
4493 regcache_cooked_write_unsigned (regs, regno, val);
4494 }
4495 }
4496
4497 /* This function is used to concisely determine if an instruction INSN
4498 references PC. Register fields of interest in INSN should have the
4499 corresponding fields of BITMASK set to 0b1111. The function
4500 returns return 1 if any of these fields in INSN reference the PC
4501 (also 0b1111, r15), else it returns 0. */
4502
4503 static int
4504 insn_references_pc (uint32_t insn, uint32_t bitmask)
4505 {
4506 uint32_t lowbit = 1;
4507
4508 while (bitmask != 0)
4509 {
4510 uint32_t mask;
4511
4512 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4513 ;
4514
4515 if (!lowbit)
4516 break;
4517
4518 mask = lowbit * 0xf;
4519
4520 if ((insn & mask) == mask)
4521 return 1;
4522
4523 bitmask &= ~mask;
4524 }
4525
4526 return 0;
4527 }
4528
4529 /* The simplest copy function. Many instructions have the same effect no
4530 matter what address they are executed at: in those cases, use this. */
4531
4532 static int
4533 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4534 const char *iname, arm_displaced_step_closure *dsc)
4535 {
4536 if (debug_displaced)
4537 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4538 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4539 iname);
4540
4541 dsc->modinsn[0] = insn;
4542
4543 return 0;
4544 }
4545
4546 static int
4547 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4548 uint16_t insn2, const char *iname,
4549 arm_displaced_step_closure *dsc)
4550 {
4551 if (debug_displaced)
4552 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4553 "opcode/class '%s' unmodified\n", insn1, insn2,
4554 iname);
4555
4556 dsc->modinsn[0] = insn1;
4557 dsc->modinsn[1] = insn2;
4558 dsc->numinsns = 2;
4559
4560 return 0;
4561 }
4562
4563 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
4564 modification. */
4565 static int
4566 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4567 const char *iname,
4568 arm_displaced_step_closure *dsc)
4569 {
4570 if (debug_displaced)
4571 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4572 "opcode/class '%s' unmodified\n", insn,
4573 iname);
4574
4575 dsc->modinsn[0] = insn;
4576
4577 return 0;
4578 }
4579
4580 /* Preload instructions with immediate offset. */
4581
4582 static void
4583 cleanup_preload (struct gdbarch *gdbarch,
4584 struct regcache *regs, arm_displaced_step_closure *dsc)
4585 {
4586 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4587 if (!dsc->u.preload.immed)
4588 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4589 }
4590
4591 static void
4592 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4593 arm_displaced_step_closure *dsc, unsigned int rn)
4594 {
4595 ULONGEST rn_val;
4596 /* Preload instructions:
4597
4598 {pli/pld} [rn, #+/-imm]
4599 ->
4600 {pli/pld} [r0, #+/-imm]. */
4601
4602 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4603 rn_val = displaced_read_reg (regs, dsc, rn);
4604 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4605 dsc->u.preload.immed = 1;
4606
4607 dsc->cleanup = &cleanup_preload;
4608 }
4609
4610 static int
4611 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4612 arm_displaced_step_closure *dsc)
4613 {
4614 unsigned int rn = bits (insn, 16, 19);
4615
4616 if (!insn_references_pc (insn, 0x000f0000ul))
4617 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4618
4619 if (debug_displaced)
4620 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4621 (unsigned long) insn);
4622
4623 dsc->modinsn[0] = insn & 0xfff0ffff;
4624
4625 install_preload (gdbarch, regs, dsc, rn);
4626
4627 return 0;
4628 }
4629
/* Copy a Thumb-2 PLD/PLI for displaced stepping.  Only the PC-relative
   (literal) forms need rewriting; register-based forms run unmodified.  */
static int
thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
		     struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int u_bit = bit (insn1, 7);	/* Add (1) or subtract (0) imm12.  */
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  /* A non-PC base register is position independent.  */
  if (rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);

  /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
     PLD (literal) Encoding T1.  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
			imm12);

  /* Fold the U bit into the offset's sign.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction {pli/pld} PC imm12 into:
     Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12

     {pli/pld} [r0, r1]

     Cleanup: r0 <- tmp[0], r1 <- tmp[1].  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);

  /* displaced_read_reg returns the pipelined (insn address + 4) PC.  */
  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
  /* Register-offset form: cleanup_preload must restore r1 as well.  */
  dsc->u.preload.immed = 0;

  /* {pli/pld} [r0, r1] */
  dsc->modinsn[0] = insn1 & 0xfff0;
  dsc->modinsn[1] = 0xf001;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_preload;
  return 0;
}
4677
4678 /* Preload instructions with register offset. */
4679
4680 static void
4681 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4682 arm_displaced_step_closure *dsc, unsigned int rn,
4683 unsigned int rm)
4684 {
4685 ULONGEST rn_val, rm_val;
4686
4687 /* Preload register-offset instructions:
4688
4689 {pli/pld} [rn, rm {, shift}]
4690 ->
4691 {pli/pld} [r0, r1 {, shift}]. */
4692
4693 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4694 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4695 rn_val = displaced_read_reg (regs, dsc, rn);
4696 rm_val = displaced_read_reg (regs, dsc, rm);
4697 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4698 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4699 dsc->u.preload.immed = 0;
4700
4701 dsc->cleanup = &cleanup_preload;
4702 }
4703
4704 static int
4705 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4706 struct regcache *regs,
4707 arm_displaced_step_closure *dsc)
4708 {
4709 unsigned int rn = bits (insn, 16, 19);
4710 unsigned int rm = bits (insn, 0, 3);
4711
4712
4713 if (!insn_references_pc (insn, 0x000f000ful))
4714 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4715
4716 if (debug_displaced)
4717 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4718 (unsigned long) insn);
4719
4720 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4721
4722 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4723 return 0;
4724 }
4725
4726 /* Copy/cleanup coprocessor load and store instructions. */
4727
4728 static void
4729 cleanup_copro_load_store (struct gdbarch *gdbarch,
4730 struct regcache *regs,
4731 arm_displaced_step_closure *dsc)
4732 {
4733 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4734
4735 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4736
4737 if (dsc->u.ldst.writeback)
4738 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4739 }
4740
4741 static void
4742 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4743 arm_displaced_step_closure *dsc,
4744 int writeback, unsigned int rn)
4745 {
4746 ULONGEST rn_val;
4747
4748 /* Coprocessor load/store instructions:
4749
4750 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4751 ->
4752 {stc/stc2} [r0, #+/-imm].
4753
4754 ldc/ldc2 are handled identically. */
4755
4756 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4757 rn_val = displaced_read_reg (regs, dsc, rn);
4758 /* PC should be 4-byte aligned. */
4759 rn_val = rn_val & 0xfffffffc;
4760 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4761
4762 dsc->u.ldst.writeback = writeback;
4763 dsc->u.ldst.rn = rn;
4764
4765 dsc->cleanup = &cleanup_copro_load_store;
4766 }
4767
4768 static int
4769 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4770 struct regcache *regs,
4771 arm_displaced_step_closure *dsc)
4772 {
4773 unsigned int rn = bits (insn, 16, 19);
4774
4775 if (!insn_references_pc (insn, 0x000f0000ul))
4776 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4777
4778 if (debug_displaced)
4779 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4780 "load/store insn %.8lx\n", (unsigned long) insn);
4781
4782 dsc->modinsn[0] = insn & 0xfff0ffff;
4783
4784 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4785
4786 return 0;
4787 }
4788
4789 static int
4790 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4791 uint16_t insn2, struct regcache *regs,
4792 arm_displaced_step_closure *dsc)
4793 {
4794 unsigned int rn = bits (insn1, 0, 3);
4795
4796 if (rn != ARM_PC_REGNUM)
4797 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4798 "copro load/store", dsc);
4799
4800 if (debug_displaced)
4801 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4802 "load/store insn %.4x%.4x\n", insn1, insn2);
4803
4804 dsc->modinsn[0] = insn1 & 0xfff0;
4805 dsc->modinsn[1] = insn2;
4806 dsc->numinsns = 2;
4807
4808 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4809 doesn't support writeback, so pass 0. */
4810 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4811
4812 return 0;
4813 }
4814
4815 /* Clean up branch instructions (actually perform the branch, by setting
4816 PC). */
4817
4818 static void
4819 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4820 arm_displaced_step_closure *dsc)
4821 {
4822 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4823 int branch_taken = condition_true (dsc->u.branch.cond, status);
4824 enum pc_write_style write_pc = dsc->u.branch.exchange
4825 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4826
4827 if (!branch_taken)
4828 return;
4829
4830 if (dsc->u.branch.link)
4831 {
4832 /* The value of LR should be the next insn of current one. In order
4833 not to confuse logic handling later insn `bx lr', if current insn mode
4834 is Thumb, the bit 0 of LR value should be set to 1. */
4835 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4836
4837 if (dsc->is_thumb)
4838 next_insn_addr |= 0x1;
4839
4840 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4841 CANNOT_WRITE_PC);
4842 }
4843
4844 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4845 }
4846
4847 /* Copy B/BL/BLX instructions with immediate destinations. */
4848
4849 static void
4850 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4851 arm_displaced_step_closure *dsc,
4852 unsigned int cond, int exchange, int link, long offset)
4853 {
4854 /* Implement "BL<cond> <label>" as:
4855
4856 Preparation: cond <- instruction condition
4857 Insn: mov r0, r0 (nop)
4858 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4859
4860 B<cond> similar, but don't set r14 in cleanup. */
4861
4862 dsc->u.branch.cond = cond;
4863 dsc->u.branch.link = link;
4864 dsc->u.branch.exchange = exchange;
4865
4866 dsc->u.branch.dest = dsc->insn_addr;
4867 if (link && exchange)
4868 /* For BLX, offset is computed from the Align (PC, 4). */
4869 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4870
4871 if (dsc->is_thumb)
4872 dsc->u.branch.dest += 4 + offset;
4873 else
4874 dsc->u.branch.dest += 8 + offset;
4875
4876 dsc->cleanup = &cleanup_branch;
4877 }
4878 static int
4879 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4880 struct regcache *regs, arm_displaced_step_closure *dsc)
4881 {
4882 unsigned int cond = bits (insn, 28, 31);
4883 int exchange = (cond == 0xf);
4884 int link = exchange || bit (insn, 24);
4885 long offset;
4886
4887 if (debug_displaced)
4888 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4889 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4890 (unsigned long) insn);
4891 if (exchange)
4892 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4893 then arrange the switch into Thumb mode. */
4894 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4895 else
4896 offset = bits (insn, 0, 23) << 2;
4897
4898 if (bit (offset, 25))
4899 offset = offset | ~0x3ffffff;
4900
4901 dsc->modinsn[0] = ARM_NOP;
4902
4903 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4904 return 0;
4905 }
4906
/* Copy a 32-bit Thumb B/BL/BLX (immediate) for displaced stepping.
   Decodes the scattered immediate fields of encodings T3/T4 (B) and
   BL/BLX, then defers to install_b_bl_blx.  */
static int
thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
		      uint16_t insn2, struct regcache *regs,
		      arm_displaced_step_closure *dsc)
{
  int link = bit (insn2, 14);
  int exchange = link && !bit (insn2, 12);	/* BLX clears bit 12.  */
  int cond = INST_AL;
  long offset = 0;
  /* J1/J2 and the sign bit S combine into the high immediate bits
     I1/I2 per the architecture's branch encodings.  */
  int j1 = bit (insn2, 13);
  int j2 = bit (insn2, 11);
  int s = sbits (insn1, 10, 10);	/* Sign-extended S bit.  */
  int i1 = !(j1 ^ bit (insn1, 10));
  int i2 = !(j2 ^ bit (insn1, 10));

  if (!link && !exchange) /* B */
    {
      offset = (bits (insn2, 0, 10) << 1);
      if (bit (insn2, 12)) /* Encoding T4 */
	{
	  offset |= (bits (insn1, 0, 9) << 12)
	    | (i2 << 22)
	    | (i1 << 23)
	    | (s << 24);
	  cond = INST_AL;
	}
      else /* Encoding T3 */
	{
	  offset |= (bits (insn1, 0, 5) << 12)
	    | (j1 << 18)
	    | (j2 << 19)
	    | (s << 20);
	  /* T3 carries its own condition field.  */
	  cond = bits (insn1, 6, 9);
	}
    }
  else
    {
      /* BL/BLX: BLX targets are word-aligned, so its low immediate is
	 shifted by 2 rather than 1.  */
      offset = (bits (insn1, 0, 9) << 12);
      offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
      offset |= exchange ?
	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
			"%.4x %.4x with offset %.8lx\n",
			link ? (exchange) ? "blx" : "bl" : "b",
			insn1, insn2, offset);

  /* Run a NOP in the scratch area; cleanup_branch performs the jump.  */
  dsc->modinsn[0] = THUMB_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
4961
/* Copy B Thumb instructions.  Handles the 16-bit conditional (encoding
   T1) and unconditional (encoding T2) forms.  */
static int
thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
	      arm_displaced_step_closure *dsc)
{
  unsigned int cond = 0;
  int offset = 0;
  unsigned short bit_12_15 = bits (insn, 12, 15);
  CORE_ADDR from = dsc->insn_addr;

  if (bit_12_15 == 0xd)		/* Encoding T1 (conditional).  */
    {
      /* offset = SignExtend (imm8:0, 32) */
      offset = sbits ((insn << 1), 0, 8);
      cond = bits (insn, 8, 11);
    }
  else if (bit_12_15 == 0xe) /* Encoding T2 */
    {
      /* offset = SignExtend (imm11:0, 32), always taken.  */
      offset = sbits ((insn << 1), 0, 11);
      cond = INST_AL;
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying b immediate insn %.4x "
			"with offset %d\n", insn, offset);

  dsc->u.branch.cond = cond;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  /* +4 accounts for the Thumb pipelined PC.  */
  dsc->u.branch.dest = from + 4 + offset;

  /* Run a NOP in the scratch area; cleanup_branch performs the jump.  */
  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
5000
5001 /* Copy BX/BLX with register-specified destinations. */
5002
5003 static void
5004 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5005 arm_displaced_step_closure *dsc, int link,
5006 unsigned int cond, unsigned int rm)
5007 {
5008 /* Implement {BX,BLX}<cond> <reg>" as:
5009
5010 Preparation: cond <- instruction condition
5011 Insn: mov r0, r0 (nop)
5012 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5013
5014 Don't set r14 in cleanup for BX. */
5015
5016 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5017
5018 dsc->u.branch.cond = cond;
5019 dsc->u.branch.link = link;
5020
5021 dsc->u.branch.exchange = 1;
5022
5023 dsc->cleanup = &cleanup_branch;
5024 }
5025
5026 static int
5027 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5028 struct regcache *regs, arm_displaced_step_closure *dsc)
5029 {
5030 unsigned int cond = bits (insn, 28, 31);
5031 /* BX: x12xxx1x
5032 BLX: x12xxx3x. */
5033 int link = bit (insn, 5);
5034 unsigned int rm = bits (insn, 0, 3);
5035
5036 if (debug_displaced)
5037 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5038 (unsigned long) insn);
5039
5040 dsc->modinsn[0] = ARM_NOP;
5041
5042 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5043 return 0;
5044 }
5045
5046 static int
5047 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5048 struct regcache *regs,
5049 arm_displaced_step_closure *dsc)
5050 {
5051 int link = bit (insn, 7);
5052 unsigned int rm = bits (insn, 3, 6);
5053
5054 if (debug_displaced)
5055 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5056 (unsigned short) insn);
5057
5058 dsc->modinsn[0] = THUMB_NOP;
5059
5060 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5061
5062 return 0;
5063 }
5064
5065
5066 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5067
5068 static void
5069 cleanup_alu_imm (struct gdbarch *gdbarch,
5070 struct regcache *regs, arm_displaced_step_closure *dsc)
5071 {
5072 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5073 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5074 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5075 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5076 }
5077
5078 static int
5079 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5080 arm_displaced_step_closure *dsc)
5081 {
5082 unsigned int rn = bits (insn, 16, 19);
5083 unsigned int rd = bits (insn, 12, 15);
5084 unsigned int op = bits (insn, 21, 24);
5085 int is_mov = (op == 0xd);
5086 ULONGEST rd_val, rn_val;
5087
5088 if (!insn_references_pc (insn, 0x000ff000ul))
5089 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5090
5091 if (debug_displaced)
5092 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5093 "%.8lx\n", is_mov ? "move" : "ALU",
5094 (unsigned long) insn);
5095
5096 /* Instruction is of form:
5097
5098 <op><cond> rd, [rn,] #imm
5099
5100 Rewrite as:
5101
5102 Preparation: tmp1, tmp2 <- r0, r1;
5103 r0, r1 <- rd, rn
5104 Insn: <op><cond> r0, r1, #imm
5105 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5106 */
5107
5108 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5109 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5110 rn_val = displaced_read_reg (regs, dsc, rn);
5111 rd_val = displaced_read_reg (regs, dsc, rd);
5112 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5113 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5114 dsc->rd = rd;
5115
5116 if (is_mov)
5117 dsc->modinsn[0] = insn & 0xfff00fff;
5118 else
5119 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5120
5121 dsc->cleanup = &cleanup_alu_imm;
5122
5123 return 0;
5124 }
5125
/* Copy a Thumb-2 MOV-immediate that involves the PC for displaced
   stepping.  Only called for the MOV encoding (see assert below).  */
static int
thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
		     uint16_t insn2, struct regcache *regs,
		     arm_displaced_step_closure *dsc)
{
  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn, rm, rd;
  ULONGEST rd_val, rn_val;

  rn = bits (insn1, 0, 3); /* Rn */
  rm = bits (insn2, 0, 3); /* Rm */
  rd = bits (insn2, 8, 11); /* Rd */

  /* This routine is only called for instruction MOV.  */
  gdb_assert (op == 0x2 && rn == 0xf);

  /* If neither Rm nor Rd is the PC, the instruction runs unmodified.  */
  if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
			"ALU", insn1, insn2);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  /* Remember the real destination for cleanup_alu_imm.  */
  dsc->rd = rd;

  /* Redirect Rd and Rm in the second halfword to r0/r1.  */
  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
5177
5178 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5179
5180 static void
5181 cleanup_alu_reg (struct gdbarch *gdbarch,
5182 struct regcache *regs, arm_displaced_step_closure *dsc)
5183 {
5184 ULONGEST rd_val;
5185 int i;
5186
5187 rd_val = displaced_read_reg (regs, dsc, 0);
5188
5189 for (i = 0; i < 3; i++)
5190 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5191
5192 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5193 }
5194
/* Prepare an ALU (register) instruction for out-of-line execution:
   save scratch registers r0-r2, seed them with the values of RD/RN/RM
   (PC reads are adjusted by displaced_read_reg), and arrange for
   cleanup_alu_reg to move the result from r0 back into RD.  */

static void
install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
		 arm_displaced_step_closure *dsc,
		 unsigned int rd, unsigned int rn, unsigned int rm)
{
  ULONGEST rd_val, rn_val, rm_val;

  /* Instruction is of form:

     <op><cond> rd, [rn,] rm [, <shift>]

     Rewrite as:

     Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
		  r0, r1, r2 <- rd, rn, rm
     Insn: <op><cond> r0, [r1,] r2 [, <shift>]
     Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
  */

  /* All reads must precede the writes: rd/rn/rm may overlap r0-r2.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  rd_val = displaced_read_reg (regs, dsc, rd);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rm_val = displaced_read_reg (regs, dsc, rm);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  dsc->cleanup = &cleanup_alu_reg;
}
5227
5228 static int
5229 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5230 arm_displaced_step_closure *dsc)
5231 {
5232 unsigned int op = bits (insn, 21, 24);
5233 int is_mov = (op == 0xd);
5234
5235 if (!insn_references_pc (insn, 0x000ff00ful))
5236 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5237
5238 if (debug_displaced)
5239 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5240 is_mov ? "move" : "ALU", (unsigned long) insn);
5241
5242 if (is_mov)
5243 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5244 else
5245 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5246
5247 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5248 bits (insn, 0, 3));
5249 return 0;
5250 }
5251
5252 static int
5253 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5254 struct regcache *regs,
5255 arm_displaced_step_closure *dsc)
5256 {
5257 unsigned rm, rd;
5258
5259 rm = bits (insn, 3, 6);
5260 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5261
5262 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5263 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5264
5265 if (debug_displaced)
5266 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5267 (unsigned short) insn);
5268
5269 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5270
5271 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5272
5273 return 0;
5274 }
5275
5276 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5277
5278 static void
5279 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5280 struct regcache *regs,
5281 arm_displaced_step_closure *dsc)
5282 {
5283 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5284 int i;
5285
5286 for (i = 0; i < 4; i++)
5287 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5288
5289 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5290 }
5291
/* Prepare an ALU instruction with register-shifted register RHS for
   out-of-line execution: save r0-r3, seed them with RD/RN/RM/RS, and
   arrange for cleanup_alu_shifted_reg to move the result from r0 back
   into RD afterwards.  */

static void
install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
			 arm_displaced_step_closure *dsc,
			 unsigned int rd, unsigned int rn, unsigned int rm,
			 unsigned rs)
{
  int i;
  ULONGEST rd_val, rn_val, rm_val, rs_val;

  /* Instruction is of form:

     <op><cond> rd, [rn,] rm, <shift> rs

     Rewrite as:

     Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
		  r0, r1, r2, r3 <- rd, rn, rm, rs
     Insn: <op><cond> r0, r1, r2, <shift> r3
     Cleanup: tmp5 <- r0
	      r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
	      rd <- tmp5
  */

  /* All reads must precede the writes: rd/rn/rm/rs may overlap r0-r3.  */
  for (i = 0; i < 4; i++)
    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

  rd_val = displaced_read_reg (regs, dsc, rd);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rm_val = displaced_read_reg (regs, dsc, rm);
  rs_val = displaced_read_reg (regs, dsc, rs);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
  dsc->rd = rd;
  dsc->cleanup = &cleanup_alu_shifted_reg;
}
5329
5330 static int
5331 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5332 struct regcache *regs,
5333 arm_displaced_step_closure *dsc)
5334 {
5335 unsigned int op = bits (insn, 21, 24);
5336 int is_mov = (op == 0xd);
5337 unsigned int rd, rn, rm, rs;
5338
5339 if (!insn_references_pc (insn, 0x000fff0ful))
5340 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5341
5342 if (debug_displaced)
5343 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5344 "%.8lx\n", is_mov ? "move" : "ALU",
5345 (unsigned long) insn);
5346
5347 rn = bits (insn, 16, 19);
5348 rm = bits (insn, 0, 3);
5349 rs = bits (insn, 8, 11);
5350 rd = bits (insn, 12, 15);
5351
5352 if (is_mov)
5353 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5354 else
5355 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5356
5357 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5358
5359 return 0;
5360 }
5361
5362 /* Clean up load instructions. */
5363
static void
cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
	      arm_displaced_step_closure *dsc)
{
  ULONGEST rt_val, rt_val2 = 0, rn_val;

  /* Collect the loaded value(s) from the scratch registers; a doubleword
     transfer leaves its second word in r1.  r2 holds the (possibly
     updated) base register value.  */
  rt_val = displaced_read_reg (regs, dsc, 0);
  if (dsc->u.ldst.xfersize == 8)
    rt_val2 = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, 2);

  /* Restore the scratch registers.  r1 was saved only for doubleword
     transfers, r3 only for register-offset addressing.  */
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);

  /* Handle register writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
  /* Put result in right place.  LOAD_WRITE_PC applies load semantics
     (possible interworking branch) if Rt was the PC.  */
  displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
  if (dsc->u.ldst.xfersize == 8)
    displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
}
5390
5391 /* Clean up store instructions. */
5392
5393 static void
5394 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5395 arm_displaced_step_closure *dsc)
5396 {
5397 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5398
5399 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5400 if (dsc->u.ldst.xfersize > 4)
5401 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5402 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5403 if (!dsc->u.ldst.immed)
5404 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5405 if (!dsc->u.ldst.restore_r4)
5406 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5407
5408 /* Writeback. */
5409 if (dsc->u.ldst.writeback)
5410 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5411 }
5412
5413 /* Copy "extra" load/store instructions. These are halfword/doubleword
5414 transfers, which have a different encoding to byte/word transfers. */
5415
static int
arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
		      struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 24);
  unsigned int op2 = bits (insn, 5, 6);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);
  /* Per decoded opcode (see below): 1 if the insn is a load, and the
     transfer size in bytes (8 means a doubleword using Rt and Rt+1).  */
  char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
  char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
  int immed = (op1 & 0x4) != 0;
  int opcode;
  ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;

  /* No PC reference: the insn can run out of line unmodified.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
			"insn %.8lx\n", unprivileged ? "unprivileged " : "",
			(unsigned long) insn);

  /* Fold op1/op2 into an index into the load/bytesize tables above.  */
  opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;

  if (opcode < 0)
    internal_error (__FILE__, __LINE__,
		    _("copy_extra_ld_st: instruction decode error"));

  /* Save the scratch registers; r3 is only needed for the
     register-offset form.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  /* Seed the scratch registers with the operand values (doubleword
     transfers also use Rt+1).  */
  rt_val = displaced_read_reg (regs, dsc, rt);
  if (bytesize[opcode] == 8)
    rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  if (bytesize[opcode] == 8)
    displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);

  /* Record what the shared cleanup_load/cleanup_store routines need.  */
  dsc->rd = rt;
  dsc->u.ldst.xfersize = bytesize[opcode];
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
       ->
       {ldr,str}<width><cond> r0, [r1,] [r2, #imm].  */
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
  else
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
       ->
       {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3].  */
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;

  dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;

  return 0;
}
5487
5488 /* Copy byte/half word/word loads and stores. */
5489
/* Prepare a byte/halfword/word load or store for out-of-line execution:
   save the scratch registers, seed r0/r2 (and r3 for register-offset
   forms) with Rt/Rn/Rm, and select the matching cleanup routine.
   NOTE(review): cleanup_load/cleanup_store restore r1 when SIZE > 4, but
   this routine never saves tmp[1]; presumably SIZE is always <= 4 here
   (doubleword transfers go through arm_copy_extra_ld_st) — confirm
   against callers.  */

static void
install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
		    arm_displaced_step_closure *dsc, int load,
		    int immed, int writeback, int size, int usermode,
		    int rt, int rm, int rn)
{
  ULONGEST rt_val, rn_val, rm_val = 0;

  /* r4 is only saved for stores, where the PC-store path below may use
     it as scratch.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  if (!load)
    dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);

  rt_val = displaced_read_reg (regs, dsc, rt);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rt;
  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = writeback;

  /* To write PC we can do:

     Before this sequence of instructions:
     r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
     r2 is the Rn value got from displaced_read_reg.

     Insn1: push {pc} Write address of STR instruction + offset on stack
     Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
     Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
					= addr(Insn1) + offset - addr(Insn3) - 8
					= offset - 16
     Insn4: add r4, r4, #8 r4 = offset - 8
     Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
				= from + offset
     Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])

     Otherwise we don't know what value to write for PC, since the offset is
     architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
     of this can be found in Section "Saving from r15" in
     http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;
}
5543
5544
/* Copy a Thumb-2 PC-relative (literal) load for displaced stepping.
   The PC-relative address is materialised as base + offset in scratch
   registers r2/r3 and the load is re-expressed in register-offset form.
   NOTE(review): the emitted copy is always a word LDR (0xf852...);
   presumably SIZE is always 4 here — confirm against callers.  */

static int
thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_closure *dsc, int size)
{
  unsigned int u_bit = bit (insn1, 7);
  unsigned int rt = bits (insn2, 12, 15);
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
			imm12);

  /* The U bit selects whether the offset is added or subtracted.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction LDR Rt imm12 into:

     Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12

     LDR R0, [R2, R3]

     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3].  */


  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  /* Literal loads use the word-aligned PC as the base.  */
  pc_val = pc_val & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);

  dsc->rd = rt;

  /* Register-offset form, so cleanup_load restores r3 too.  */
  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  /* LDR R0, R2, R3 */
  dsc->modinsn[0] = 0xf852;
  dsc->modinsn[1] = 0x3;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_load;

  return 0;
}
5600
/* Copy a 32-bit Thumb-2 word load (immediate or register offset) for
   displaced stepping, rewriting it onto scratch registers when Rt or Rn
   is the PC.  Returns 0 on success.  */

static int
thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_closure *dsc,
			  int writeback, int immed)
{
  unsigned int rt = bits (insn2, 12, 15);
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */
  /* In LDR (register), there is also a register Rm, which is not allowed to
     be PC, so we don't have to check it.  */

  if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
					dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
			rt, rn, insn1, insn2);

  /* load=1, size=4, usermode=0: save/seed scratch regs and select
     cleanup_load.  */
  install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
		      0, rt, rm, rn);

  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* ldr[b]<cond> rt, [rn, #imm], etc.
       ->
       ldr[b]<cond> r0, [r2, #imm].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = insn2 & 0x0fff;
    }
  else
    /* ldr[b]<cond> rt, [rn, rm], etc.
       ->
       ldr[b]<cond> r0, [r2, r3].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
    }

  dsc->numinsns = 2;

  return 0;
}
5648
5649
/* Copy an ARM byte/word load or store for displaced stepping.  Simple
   cases are retargeted at scratch registers; storing the PC needs a
   multi-instruction sequence to reconstruct the architecturally stored
   value (see install_load_store's long comment).  */

static int
arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
			    struct regcache *regs,
			    arm_displaced_step_closure *dsc,
			    int load, int size, int usermode)
{
  int immed = !bit (insn, 25);
  int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */

  /* No PC reference: the insn can run out of line unmodified.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying %s%s r%d [r%d] insn %.8lx\n",
			load ? (size == 1 ? "ldrb" : "ldr")
			     : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
			rt, rn,
			(unsigned long) insn);

  install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
		      usermode, rt, rm, rn);

  if (load || rt != ARM_PC_REGNUM)
    {
      dsc->u.ldst.restore_r4 = 0;

      if (immed)
	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
	dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
      else
	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, r3].  */
	dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
    }
  else
    {
      /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
      dsc->u.ldst.restore_r4 = 1;
      /* This sequence computes in r0 the value the original insn would
	 have stored for the PC; see install_load_store's comment.  */
      dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
      dsc->modinsn[1] = 0xe8bd0010;  /* pop  {r4} */
      dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
      dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
      dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */

      /* As above.  */
      if (immed)
	dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
      else
	dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;

      dsc->numinsns = 6;
    }

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;

  return 0;
}
5714
5715 /* Cleanup LDM instructions with fully-populated register list. This is an
5716 unfortunate corner case: it's impossible to implement correctly by modifying
5717 the instruction. The issue is as follows: we have an instruction,
5718
5719 ldm rN, {r0-r15}
5720
5721 which we must rewrite to avoid loading PC. A possible solution would be to
5722 do the load in two halves, something like (with suitable cleanup
5723 afterwards):
5724
5725 mov r8, rN
5726 ldm[id][ab] r8!, {r0-r7}
5727 str r7, <temp>
5728 ldm[id][ab] r8, {r7-r14}
5729 <bkpt>
5730
5731 but at present there's no suitable place for <temp>, since the scratch space
5732 is overwritten before the cleanup routine is called. For now, we simply
5733 emulate the instruction. */
5734
static void
cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
			arm_displaced_step_closure *dsc)
{
  int inc = dsc->u.block.increment;
  /* Pre-increment/decrement adjusts the address before each transfer;
     post adjusts it after.  */
  int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
  int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
  uint32_t regmask = dsc->u.block.regmask;
  /* Increment forms load lowest register from the lowest address, so walk
     upwards; decrement forms walk downwards from r15.  */
  int regno = inc ? 0 : 15;
  CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
  /* LDM ..., {...pc}^ is an exception return; we cannot emulate it.  */
  int exception_return = dsc->u.block.load && dsc->u.block.user
			 && (regmask & 0x8000) != 0;
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int do_transfer = condition_true (dsc->u.block.cond, status);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* The original insn was conditional; if the condition failed there is
     nothing to emulate.  */
  if (!do_transfer)
    return;

  /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
     sensible we can do here.  Complain loudly.  */
  if (exception_return)
    error (_("Cannot single-step exception return"));

  /* We don't handle any stores here for now.  */
  gdb_assert (dsc->u.block.load != 0);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
			"%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
			dsc->u.block.increment ? "inc" : "dec",
			dsc->u.block.before ? "before" : "after");

  /* Emulate the load one register at a time, consuming one set bit of
     the register mask per iteration.  */
  while (regmask)
    {
      uint32_t memword;

      /* Advance to the next register named in the mask.  */
      if (inc)
	while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
	  regno++;
      else
	while (regno >= 0 && (regmask & (1 << regno)) == 0)
	  regno--;

      xfer_addr += bump_before;

      memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
      displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);

      xfer_addr += bump_after;

      regmask &= ~(1 << regno);
    }

  /* Emulate base-register writeback as well.  */
  if (dsc->u.block.writeback)
    displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
			 CANNOT_WRITE_PC);
}
5793
5794 /* Clean up an STM which included the PC in the register list. */
5795
static void
cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
			arm_displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int store_executed = condition_true (dsc->u.block.cond, status);
  CORE_ADDR pc_stored_at, transferred_regs
    = count_one_bits (dsc->u.block.regmask);
  CORE_ADDR stm_insn_addr;
  uint32_t pc_val;
  long offset;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* If condition code fails, there's nothing else to do.  */
  if (!store_executed)
    return;

  /* The PC has the highest register number, so it is always stored at the
     highest address of the transfer; compute that address for each of the
     four LDM/STM addressing modes.  */
  if (dsc->u.block.increment)
    {
      pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;

      if (dsc->u.block.before)
	pc_stored_at += 4;
    }
  else
    {
      pc_stored_at = dsc->u.block.xfer_addr;

      if (dsc->u.block.before)
	pc_stored_at -= 4;
    }

  /* The copy executed at scratch_base, so the difference between the
     stored value and scratch_base is the architecture's store-PC offset
     (PC+8 or PC+12) — auto-detected rather than hard-coded.  */
  pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
  stm_insn_addr = dsc->scratch_base;
  offset = pc_val - stm_insn_addr;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
			"STM instruction\n", offset);

  /* Rewrite the stored PC to the proper value for the non-displaced original
     instruction.  */
  write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
				 dsc->insn_addr + offset);
}
5841
5842 /* Clean up an LDM which includes the PC in the register list. We clumped all
5843 the registers in the transferred list into a contiguous range r0...rX (to
5844 avoid loading PC directly and losing control of the debugged program), so we
5845 must undo that here. */
5846
static void
cleanup_block_load_pc (struct gdbarch *gdbarch,
		       struct regcache *regs,
		       arm_displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int load_executed = condition_true (dsc->u.block.cond, status);
  unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
  unsigned int regs_loaded = count_one_bits (mask);
  unsigned int num_to_shuffle = regs_loaded, clobbered;

  /* The method employed here will fail if the register list is fully populated
     (we need to avoid loading PC directly).  */
  gdb_assert (num_to_shuffle < 16);

  /* Conditional insn whose condition failed: nothing was loaded.  */
  if (!load_executed)
    return;

  /* The copied insn loaded into the contiguous range r0..r(N-1); those
     are the registers we scribbled over and may need to restore.  */
  clobbered = (1 << num_to_shuffle) - 1;

  /* Walk WRITE_REG down from the PC; each register named in the original
     mask receives the next-highest loaded value (r(num_to_shuffle-1)).  */
  while (num_to_shuffle > 0)
    {
      if ((mask & (1 << write_reg)) != 0)
	{
	  unsigned int read_reg = num_to_shuffle - 1;

	  if (read_reg != write_reg)
	    {
	      ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
	      displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
	      if (debug_displaced)
		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
				    "loaded register r%d to r%d\n"), read_reg,
				    write_reg);
	    }
	  else if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
				"r%d already in the right place\n"),
				write_reg);

	  /* This register now holds its final value; no restore needed.  */
	  clobbered &= ~(1 << write_reg);

	  num_to_shuffle--;
	}

      write_reg--;
    }

  /* Restore any registers we scribbled over.  */
  for (write_reg = 0; clobbered != 0; write_reg++)
    {
      if ((clobbered & (1 << write_reg)) != 0)
	{
	  displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
			       CANNOT_WRITE_PC);
	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
				"clobbered register r%d\n"), write_reg);
	  clobbered &= ~(1 << write_reg);
	}
    }

  /* Perform register writeback manually.  The copy ran with the
     writeback bit cleared (see arm_copy_block_xfer).  */
  if (dsc->u.block.writeback)
    {
      ULONGEST new_rn_val = dsc->u.block.xfer_addr;

      if (dsc->u.block.increment)
	new_rn_val += regs_loaded * 4;
      else
	new_rn_val -= regs_loaded * 4;

      displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
			   CANNOT_WRITE_PC);
    }
}
5923
5924 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5925 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5926
static int
arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
		     struct regcache *regs,
		     arm_displaced_step_closure *dsc)
{
  int load = bit (insn, 20);
  int user = bit (insn, 22);
  int increment = bit (insn, 23);
  int before = bit (insn, 24);
  int writeback = bit (insn, 21);
  int rn = bits (insn, 16, 19);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
    return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);

  /* PC as the base register is architecturally unpredictable; warn and
     run the insn as-is.  */
  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.8lx\n", (unsigned long) insn);

  /* Record the decoded fields for the cleanup routines.  */
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = user;
  dsc->u.block.increment = increment;
  dsc->u.block.before = before;
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = bits (insn, 28, 31);

  dsc->u.block.regmask = insn & 0xffff;

  if (load)
    {
      if ((insn & 0xffff) == 0xffff)
	{
	  /* LDM with a fully-populated register list.  This case is
	     particularly tricky.  Implement for now by fully emulating the
	     instruction (which might not behave perfectly in all cases, but
	     these instructions should be rare enough for that not to matter
	     too much).  */
	  dsc->modinsn[0] = ARM_NOP;

	  dsc->cleanup = &cleanup_block_load_all;
	}
      else
	{
	  /* LDM of a list of registers which includes PC.  Implement by
	     rewriting the list of registers to be transferred into a
	     contiguous chunk r0...rX before doing the transfer, then shuffling
	     registers into the correct places in the cleanup routine.  */
	  unsigned int regmask = insn & 0xffff;
	  unsigned int num_in_list = count_one_bits (regmask), new_regmask;
	  unsigned int i;

	  /* Save the low registers the rewritten list will clobber.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Writeback makes things complicated.  We need to avoid clobbering
	     the base register with one of the registers in our modified
	     register list, but just using a different register can't work in
	     all cases, e.g.:

	     ldm r14!, {r0-r13,pc}

	     which would need to be rewritten as:

	     ldm rN!, {r0-r14}

	     but that can't work, because there's no free register for N.

	     Solve this by turning off the writeback bit, and emulating
	     writeback manually in the cleanup routine.  */

	  if (writeback)
	    insn &= ~(1 << 21);

	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) insn & 0xffff, new_regmask);

	  dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM of a list of registers which includes PC.  Run the instruction
	 as-is, but out of line: this will store the wrong value for the PC,
	 so we must manually fix up the memory in the cleanup routine.
	 Doing things this way has the advantage that we can auto-detect
	 the offset of the PC write (which is architecture-dependent) in
	 the cleanup routine.  */
      dsc->modinsn[0] = insn;

      dsc->cleanup = &cleanup_block_store_pc;
    }

  return 0;
}
6040
/* Thumb-2 counterpart of arm_copy_block_xfer: copy a 32-bit LDM/STM for
   displaced stepping, rewriting PC-loading LDMs onto a contiguous low
   register range and fixing up PC-storing STMs in the cleanup.  */

static int
thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
			struct regcache *regs,
			arm_displaced_step_closure *dsc)
{
  int rn = bits (insn1, 0, 3);
  int load = bit (insn1, 4);
  int writeback = bit (insn1, 5);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);

  /* PC as the base register is architecturally unpredictable; warn and
     run the insn as-is.  */
  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.4x%.4x\n", insn1, insn2);

  /* Clear bit 13, since it should be always zero.  */
  dsc->u.block.regmask = (insn2 & 0xdfff);
  dsc->u.block.rn = rn;

  /* Thumb-2 LDM/STM is unconditional and has no user-bank form; the
     addressing mode lives in bits 7 (increment) and 8 (before).  */
  dsc->u.block.load = load;
  dsc->u.block.user = 0;
  dsc->u.block.increment = bit (insn1, 7);
  dsc->u.block.before = bit (insn1, 8);
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = INST_AL;
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);

  if (load)
    {
      if (dsc->u.block.regmask == 0xffff)
	{
	  /* This branch is impossible to happen.  */
	  gdb_assert (0);
	}
      else
	{
	  /* Rewrite the register list into a contiguous chunk r0..r(N-1);
	     cleanup_block_load_pc shuffles values into place afterwards.  */
	  unsigned int regmask = dsc->u.block.regmask;
	  unsigned int num_in_list = count_one_bits (regmask), new_regmask;
	  unsigned int i;

	  /* Save the low registers the rewritten list will clobber.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Run the copy without writeback; it is emulated manually in
	     the cleanup routine (see arm_copy_block_xfer).  */
	  if (writeback)
	    insn1 &= ~(1 << 5);

	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) dsc->u.block.regmask, new_regmask);

	  dsc->modinsn[0] = insn1;
	  dsc->modinsn[1] = (new_regmask & 0xffff);
	  dsc->numinsns = 2;

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM including the PC: run as-is out of line; the stored PC value
	 is corrected in cleanup_block_store_pc.  */
      dsc->modinsn[0] = insn1;
      dsc->modinsn[1] = insn2;
      dsc->numinsns = 2;
      dsc->cleanup = &cleanup_block_store_pc;
    }
  return 0;
}
6122
6123 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6124 This is used to avoid a dependency on BFD's bfd_endian enum. */
6125
6126 ULONGEST
6127 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6128 int byte_order)
6129 {
6130 return read_memory_unsigned_integer (memaddr, len,
6131 (enum bfd_endian) byte_order);
6132 }
6133
6134 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6135
6136 CORE_ADDR
6137 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6138 CORE_ADDR val)
6139 {
6140 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6141 }
6142
6143 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6144
static CORE_ADDR
arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
{
  /* The generic ARM target cannot predict where a syscall resumes;
     returning 0 reports no special next PC.  NOTE(review): presumably
     OS-specific tdep code installs a real implementation in
     arm_get_next_pcs_ops — confirm against the ops users.  */
  return 0;
}
6150
6151 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6152
6153 int
6154 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6155 {
6156 return arm_is_thumb (self->regcache);
6157 }
6158
6159 /* single_step() is called just before we want to resume the inferior,
6160 if we want to single-step it but there is no hardware or kernel
6161 single-step support. We find the target of the coming instructions
6162 and breakpoint them. */
6163
6164 std::vector<CORE_ADDR>
6165 arm_software_single_step (struct regcache *regcache)
6166 {
6167 struct gdbarch *gdbarch = regcache->arch ();
6168 struct arm_get_next_pcs next_pcs_ctx;
6169
6170 arm_get_next_pcs_ctor (&next_pcs_ctx,
6171 &arm_get_next_pcs_ops,
6172 gdbarch_byte_order (gdbarch),
6173 gdbarch_byte_order_for_code (gdbarch),
6174 0,
6175 regcache);
6176
6177 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6178
6179 for (CORE_ADDR &pc_ref : next_pcs)
6180 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6181
6182 return next_pcs;
6183 }
6184
6185 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6186 for Linux, where some SVC instructions must be treated specially. */
6187
6188 static void
6189 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6190 arm_displaced_step_closure *dsc)
6191 {
6192 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6193
6194 if (debug_displaced)
6195 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6196 "%.8lx\n", (unsigned long) resume_addr);
6197
6198 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6199 }
6200
6201
6202 /* Common copy routine for svc instruction. */
6203
6204 static int
6205 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6206 arm_displaced_step_closure *dsc)
6207 {
6208 /* Preparation: none.
6209 Insn: unmodified svc.
6210 Cleanup: pc <- insn_addr + insn_size. */
6211
6212 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6213 instruction. */
6214 dsc->wrote_to_pc = 1;
6215
6216 /* Allow OS-specific code to override SVC handling. */
6217 if (dsc->u.svc.copy_svc_os)
6218 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6219 else
6220 {
6221 dsc->cleanup = &cleanup_svc;
6222 return 0;
6223 }
6224 }
6225
6226 static int
6227 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6228 struct regcache *regs, arm_displaced_step_closure *dsc)
6229 {
6230
6231 if (debug_displaced)
6232 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6233 (unsigned long) insn);
6234
6235 dsc->modinsn[0] = insn;
6236
6237 return install_svc (gdbarch, regs, dsc);
6238 }
6239
6240 static int
6241 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6242 struct regcache *regs, arm_displaced_step_closure *dsc)
6243 {
6244
6245 if (debug_displaced)
6246 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6247 insn);
6248
6249 dsc->modinsn[0] = insn;
6250
6251 return install_svc (gdbarch, regs, dsc);
6252 }
6253
6254 /* Copy undefined instructions. */
6255
6256 static int
6257 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6258 arm_displaced_step_closure *dsc)
6259 {
6260 if (debug_displaced)
6261 fprintf_unfiltered (gdb_stdlog,
6262 "displaced: copying undefined insn %.8lx\n",
6263 (unsigned long) insn);
6264
6265 dsc->modinsn[0] = insn;
6266
6267 return 0;
6268 }
6269
6270 static int
6271 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6272 arm_displaced_step_closure *dsc)
6273 {
6274
6275 if (debug_displaced)
6276 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6277 "%.4x %.4x\n", (unsigned short) insn1,
6278 (unsigned short) insn2);
6279
6280 dsc->modinsn[0] = insn1;
6281 dsc->modinsn[1] = insn2;
6282 dsc->numinsns = 2;
6283
6284 return 0;
6285 }
6286
6287 /* Copy unpredictable instructions. */
6288
6289 static int
6290 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6291 arm_displaced_step_closure *dsc)
6292 {
6293 if (debug_displaced)
6294 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6295 "%.8lx\n", (unsigned long) insn);
6296
6297 dsc->modinsn[0] = insn;
6298
6299 return 0;
6300 }
6301
6302 /* The decode_* functions are instruction decoding helpers. They mostly follow
6303 the presentation in the ARM ARM. */
6304
/* Decode the miscellaneous / memory-hint / Advanced SIMD subset of the
   unconditional ARM encoding space and dispatch INSN to the matching
   copy routine.  Returns that routine's status (0 on success).  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  /* op1 is bits <26:20>, op2 is bits <7:4>, rn is bits <19:16>;
     the tests below follow the ARM ARM encoding-table layout.  */
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      /* pld/pldw with rn == pc is unpredictable.  */
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    /* Barrier and clrex instructions, distinguished by op2.  */
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    /* Register-form hints; bit 7 of op1 does not affect the decode.  */
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
6364
/* Decode the ARM unconditional (condition field 0b1111) instruction
   space: misc/hints/NEON, srs/rfe, branches, and coprocessor
   load/store and transfer forms.  Dispatches to the matching copy
   routine and returns its status.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      /* Store-side coprocessor forms, selected by bits <23:21>.  */
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	/* Load-side coprocessor forms; the immediate and literal
	   variants are distinguished by whether rn is the PC.  */
	int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      /* Bit 4 separates register transfers from coprocessor dataproc.  */
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
6447
6448 /* Decode miscellaneous instructions in dp/misc encoding space. */
6449
static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  /* op2 is bits <6:4>, op is bits <22:21>, per the ARM ARM
     miscellaneous-instructions table.  */
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* op values 0x0/0x2 intentionally fall through to undef.  */
      /* Fall through.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
6500
/* Decode the data-processing / miscellaneous ARM encoding space.
   Immediate forms (bit 25 set) are handled first; register forms fall
   into the else branch and are further split on op1/op2.  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    arm_displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6546
/* Decode the load/store word and unsigned-byte ARM encoding space.
   A (bit 25) selects the register-offset form, B (bit 4) rules out the
   media space, and op1 (bits <24:20>) picks the exact variant.
   NOTE(review): the three trailing arguments to
   arm_copy_ldr_str_ldrb_strb appear to be (load, size-in-bytes,
   unprivileged/T-variant) — the 4-byte cases match LDR/STR and the
   1-byte cases LDRB/STRB — confirm against that function's
   definition.  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     arm_displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
6583
/* Decode the ARM media-instructions encoding space (parallel add/sub,
   pack/saturate/reverse, usad8/usada8, bit-field instructions),
   dispatching on op1 (bits <24:20>).  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  arm_displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  /* rd == 0xf distinguishes usad8 from usada8.  */
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  /* rn == 0xf distinguishes bfc from bfi.  */
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6639
6640 static int
6641 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6642 struct regcache *regs,
6643 arm_displaced_step_closure *dsc)
6644 {
6645 if (bit (insn, 25))
6646 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6647 else
6648 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6649 }
6650
/* Decode VFP/Neon extension-register load/store instructions,
   dispatching on the opcode in bits <24:20>.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6681
6682 /* Decode shifted register instructions. */
6683
6684 static int
6685 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6686 uint16_t insn2, struct regcache *regs,
6687 arm_displaced_step_closure *dsc)
6688 {
6689 /* PC is only allowed to be used in instruction MOV. */
6690
6691 unsigned int op = bits (insn1, 5, 8);
6692 unsigned int rn = bits (insn1, 0, 3);
6693
6694 if (op == 0x2 && rn == 0xf) /* MOV */
6695 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6696 else
6697 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6698 "dp (shift reg)", dsc);
6699 }
6700
6701
6702 /* Decode extension register load/store. Exactly the same as
6703 arm_decode_ext_reg_ld_st. */
6704
static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     arm_displaced_step_closure *dsc)
{
  /* Opcode is bits <8:4> of the first halfword, mirroring the ARM
     encoding handled by arm_decode_ext_reg_ld_st.  */
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c:  /* 01x00 */
    case 0x0a: case 0x0e:  /* 01x10 */
    case 0x12: case 0x16:  /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d:  /* 01x01 */
    case 0x0b: case 0x0f:  /* 01x11 */
    case 0x13: case 0x17:  /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* vldr may be PC-relative, so it needs the full copro copier.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6740
/* Decode the supervisor-call and coprocessor ARM encoding space:
   SVC, coprocessor load/store, register transfers and dataproc, with
   the (coproc & 0xe) == 0xa cases routed to VFP/Neon handling.  */

static int
arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
		      struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 25);
  int op = bit (insn, 4);
  unsigned int coproc = bits (insn, 8, 11);

  if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
    return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* stc/stc2.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* ldc/ldc2 imm/lit.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x3e) == 0x00)
    return arm_copy_undef (gdbarch, insn, dsc);
  else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
    return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
  else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
  else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
  else if ((op1 & 0x30) == 0x20 && !op)
    {
      if ((coproc & 0xe) == 0xa)
	return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
    }
  else if ((op1 & 0x30) == 0x20 && op)
    return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
  else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
  else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
  else if ((op1 & 0x30) == 0x30)
    return arm_copy_svc (gdbarch, insn, regs, dsc);
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Possibly unreachable.  */
}
6785
/* Decode the Thumb-2 coprocessor / SIMD encoding space, splitting on
   bit 9 and bits <8:5> of the first halfword.  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 arm_displaced_step_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0)  /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	  /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0)  /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else /* LDC/LDC2 {literal, immediate}.  */
		/* LDC may be PC-relative (literal form), so it needs the
		   full copro load/store copier.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);

  /* Not reached: every branch above returns.  */
  return 0;
}
6826
6827 static void
6828 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6829 arm_displaced_step_closure *dsc, int rd)
6830 {
6831 /* ADR Rd, #imm
6832
6833 Rewrite as:
6834
6835 Preparation: Rd <- PC
6836 Insn: ADD Rd, #imm
6837 Cleanup: Null.
6838 */
6839
6840 /* Rd <- PC */
6841 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6842 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6843 }
6844
6845 static int
6846 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6847 arm_displaced_step_closure *dsc,
6848 int rd, unsigned int imm)
6849 {
6850
6851 /* Encoding T2: ADDS Rd, #imm */
6852 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6853
6854 install_pc_relative (gdbarch, regs, dsc, rd);
6855
6856 return 0;
6857 }
6858
6859 static int
6860 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6861 struct regcache *regs,
6862 arm_displaced_step_closure *dsc)
6863 {
6864 unsigned int rd = bits (insn, 8, 10);
6865 unsigned int imm8 = bits (insn, 0, 7);
6866
6867 if (debug_displaced)
6868 fprintf_unfiltered (gdb_stdlog,
6869 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6870 rd, imm8, insn);
6871
6872 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6873 }
6874
6875 static int
6876 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6877 uint16_t insn2, struct regcache *regs,
6878 arm_displaced_step_closure *dsc)
6879 {
6880 unsigned int rd = bits (insn2, 8, 11);
6881 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
6882 extract raw immediate encoding rather than computing immediate. When
6883 generating ADD or SUB instruction, we can simply perform OR operation to
6884 set immediate into ADD. */
6885 unsigned int imm_3_8 = insn2 & 0x70ff;
6886 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6887
6888 if (debug_displaced)
6889 fprintf_unfiltered (gdb_stdlog,
6890 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6891 rd, imm_i, imm_3_8, insn1, insn2);
6892
6893 if (bit (insn1, 7)) /* Encoding T2 */
6894 {
6895 /* Encoding T3: SUB Rd, Rd, #imm */
6896 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6897 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6898 }
6899 else /* Encoding T3 */
6900 {
6901 /* Encoding T3: ADD Rd, Rd, #imm */
6902 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6903 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6904 }
6905 dsc->numinsns = 2;
6906
6907 install_pc_relative (gdbarch, regs, dsc, rd);
6908
6909 return 0;
6910 }
6911
static int
thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
			      struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int rt = bits (insn1, 8, 10);
  unsigned int pc;
  /* The encoded 8-bit offset is in words; scale it to bytes.  */
  int imm8 = (bits (insn1, 0, 7) << 2);

  /* LDR Rd, #imm8

     Rewrite as:

     Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;

     Insn: LDR R0, [R2, R3];
     Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb ldr r%d [pc #%d]\n"
			, rt, imm8);

  /* Save the registers the rewritten sequence clobbers.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
  /* The assembler calculates the required value of the offset from the
     Align(PC,4) value of this instruction to the label.  */
  pc = pc & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);

  /* Cleanup parameters: load of 4 bytes into RT via scratch register 0,
     no writeback.  */
  dsc->rd = rt;
  dsc->u.ldst.xfersize = 4;
  dsc->u.ldst.rn = 0;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  dsc->modinsn[0] = 0x58d0;	/* ldr r0, [r2, r3] */

  dsc->cleanup = &cleanup_load;

  return 0;
}
6959
6960 /* Copy Thumb cbnz/cbz instruction. */
6961
6962 static int
6963 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
6964 struct regcache *regs,
6965 arm_displaced_step_closure *dsc)
6966 {
6967 int non_zero = bit (insn1, 11);
6968 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
6969 CORE_ADDR from = dsc->insn_addr;
6970 int rn = bits (insn1, 0, 2);
6971 int rn_val = displaced_read_reg (regs, dsc, rn);
6972
6973 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
6974 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
6975 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
6976 condition is false, let it be, cleanup_branch will do nothing. */
6977 if (dsc->u.branch.cond)
6978 {
6979 dsc->u.branch.cond = INST_AL;
6980 dsc->u.branch.dest = from + 4 + imm5;
6981 }
6982 else
6983 dsc->u.branch.dest = from + 2;
6984
6985 dsc->u.branch.link = 0;
6986 dsc->u.branch.exchange = 0;
6987
6988 if (debug_displaced)
6989 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
6990 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
6991 rn, rn_val, insn1, dsc->u.branch.dest);
6992
6993 dsc->modinsn[0] = THUMB_NOP;
6994
6995 dsc->cleanup = &cleanup_branch;
6996 return 0;
6997 }
6998
6999 /* Copy Table Branch Byte/Halfword */
static int
thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  ULONGEST rn_val, rm_val;
  /* Bit 4 of the second halfword selects TBH (halfword table) over
     TBB (byte table).  */
  int is_tbh = bit (insn2, 4);
  CORE_ADDR halfwords = 0;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
  rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));

  /* Fetch the table entry at [Rn + Rm * entry-size].  NOTE(review):
     the target_read_memory return value is not checked; a failed read
     leaves the buffer contents unspecified.  */
  if (is_tbh)
    {
      gdb_byte buf[2];

      target_read_memory (rn_val + 2 * rm_val, buf, 2);
      halfwords = extract_unsigned_integer (buf, 2, byte_order);
    }
  else
    {
      gdb_byte buf[1];

      target_read_memory (rn_val + rm_val, buf, 1);
      halfwords = extract_unsigned_integer (buf, 1, byte_order);
    }

  /* NOTE(review): the second "offset" in this message is actually the
     fetched table entry (halfword count).  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
			" offset 0x%x\n", is_tbh ? "tbh" : "tbb",
			(unsigned int) rn_val, (unsigned int) rm_val,
			(unsigned int) halfwords);

  /* The table entry counts halfwords from the instruction's PC (this
     instruction's address + 4).  */
  dsc->u.branch.cond = INST_AL;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
7043
7044 static void
7045 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7046 arm_displaced_step_closure *dsc)
7047 {
7048 /* PC <- r7 */
7049 int val = displaced_read_reg (regs, dsc, 7);
7050 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7051
7052 /* r7 <- r8 */
7053 val = displaced_read_reg (regs, dsc, 8);
7054 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7055
7056 /* r8 <- tmp[0] */
7057 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7058
7059 }
7060
static int
thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
			 struct regcache *regs,
			 arm_displaced_step_closure *dsc)
{
  /* Low byte of the instruction is the r0-r7 register list.  */
  dsc->u.block.regmask = insn1 & 0x00ff;

  /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
     to :

     (1) register list is full, that is, r0-r7 are used.
     Prepare: tmp[0] <- r8

     POP {r0, r1, ...., r6, r7}; remove PC from reglist
     MOV r8, r7; Move value of r7 to r8;
     POP {r7}; Store PC value into r7.

     Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]

     (2) register list is not full, supposing there are N registers in
     register list (except PC, 0 <= N <= 7).
     Prepare: for each i, 0 - N, tmp[i] <- ri.

     POP {r0, r1, ...., rN};

     Cleanup: Set registers in original reglist from r0 - rN.  Restore r0 - rN
     from tmp[] properly.
  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
			dsc->u.block.regmask, insn1);

  if (dsc->u.block.regmask == 0xff)
    {
      /* Case (1): all of r0-r7 are popped.  */
      dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);

      dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
      dsc->modinsn[1] = 0x46b8;		  /* MOV r8, r7 */
      dsc->modinsn[2] = 0xbc80;		  /* POP {r7} */

      dsc->numinsns = 3;
      dsc->cleanup = &cleanup_pop_pc_16bit_all;
    }
  else
    {
      /* Case (2): pop the N listed registers plus the PC into the
	 contiguous range r0..rN, then let cleanup_block_load_pc move
	 each value to its real destination.  */
      unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
      unsigned int i;
      unsigned int new_regmask;

      /* Save r0..rN (the extra +1 slot covers the PC's landing
	 register).  */
      for (i = 0; i < num_in_list + 1; i++)
	dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

      new_regmask = (1 << (num_in_list + 1)) - 1;

      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
					  "{..., pc}: original reg list %.4x,"
					  " modified list %.4x\n"),
			    (int) dsc->u.block.regmask, new_regmask);

      /* Record that the PC was in the original list so the cleanup
	 knows to write it.  */
      dsc->u.block.regmask |= 0x8000;
      dsc->u.block.writeback = 0;
      dsc->u.block.cond = INST_AL;

      dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);

      dsc->cleanup = &cleanup_block_load_pc;
    }

  return 0;
}
7133
/* Decode the 16-bit Thumb instruction INSN1 for displaced stepping and
   fill in DSC via the matching copy routine.  Instructions that cannot
   reference the PC are copied unmodified; an undecodable instruction
   is reported as an internal error.  */

static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    struct regcache *regs,
				    arm_displaced_step_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions.  */
  switch (op_bit_12_15)
    {
      /* Shift (imme), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					 "shift/add/sub/mov/cmp",
					 dsc);
      break;
    case 4:
      switch (op_bit_10_11)
	{
	case 0: /* Data-processing */
	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					     "data-processing",
					     dsc);
	  break;
	case 1: /* Special data instructions and branch and exchange.  */
	  {
	    unsigned short op = bits (insn1, 7, 9);
	    if (op == 6 || op == 7) /* BX or BLX */
	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
						 dsc);
	  }
	  break;
	default: /* LDR (literal) */
	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
	}
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address */
	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
	switch (bits (insn1, 8, 11))
	  {
	  case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
	    break;
	  case 12: case 13: /* POP */
	    if (bit (insn1, 8)) /* PC is in register list.  */
	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
	    break;
	  case 15: /* If-Then, and hints */
	    if (bits (insn1, 0, 3))
	      /* If-Then makes up to four following instructions conditional.
		 IT instruction itself is not conditional, so handle it as a
		 common unmodified instruction.  */
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
						 dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
	    break;
	  default:
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
	  }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
	err = thumb_copy_b (gdbarch, insn1, dsc);
      else
	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
7236
/* Decode a 32-bit Thumb-2 instruction (INSN1, INSN2) from the "load
   byte/halfword/word and memory hints" encoding space — selected here
   by bits 5-6 of INSN1 — and set up DSC for displaced stepping.
   PC-relative (literal) loads and preloads need rewritten copies;
   everything else is copied unmodified.  Returns the result of the
   selected copy routine, or 0.  */

static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
				 uint16_t insn1, uint16_t insn2,
				 struct regcache *regs,
				 arm_displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);
  int rn = bits (insn1, 0, 3);
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
	{
	  if (rn == 0xf)
	    /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"pli/pld", dsc);
	}
      else
	{
	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     1);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrb{reg, immediate}/ldrbt",
						dsc);
	}

      break;
    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "pld/unalloc memhint", dsc);
      else
	{
	  if (rn == 0xf)
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     2);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrh/ldrht", dsc);
	}
      break;
    case 2: /* Load word */
      {
	int insn2_bit_8_11 = bits (insn2, 8, 11);

	if (rn == 0xf)
	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
	else if (op1 == 0x1) /* Encoding T3 */
	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
					   0, 1);
	else /* op1 == 0x0 */
	  {
	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
	      /* LDR (immediate) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, bit (insn2, 8), 1);
	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						  "ldrt", dsc);
	    else
	      /* LDR (register) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, 0, 0);
	  }
	break;
      }
    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      break;
    }
  return 0;
}
7316
/* Decode the 32-bit Thumb-2 instruction formed by halfwords INSN1 and
   INSN2 for displaced stepping, and fill in DSC via the matching copy
   routine.  The dispatch follows the Thumb-2 encoding tree: op1 is
   bits 11-12 of the first halfword.  An undecodable instruction is
   reported as an internal error.  */

static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    uint16_t insn2, struct regcache *regs,
				    arm_displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      {
	switch (bits (insn1, 9, 10))
	  {
	  case 0:
	    if (bit (insn1, 6))
	      {
		/* Load/store {dual, exclusive}, table branch.  */
		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
		    && bits (insn2, 5, 7) == 0)
		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
						  dsc);
		else
		  /* PC is not allowed to use in load/store {dual, exclusive}
		     instructions.  */
		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						     "load/store dual/ex", dsc);
	      }
	    else /* load/store multiple */
	      {
		switch (bits (insn1, 7, 8))
		  {
		  case 0: case 3: /* SRS, RFE */
		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						       "srs/rfe", dsc);
		    break;
		  case 1: case 2: /* LDM/STM/PUSH/POP */
		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
		    break;
		  }
	      }
	    break;

	  case 1:
	    /* Data-processing (shift register).  */
	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
					      dsc);
	    break;
	  default: /* Coprocessor instructions.  */
	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	    break;
	  }
	break;
      }
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
	{
	  if (bit (insn2, 14)  /* BLX/BL */
	      || bit (insn2, 12) /* Unconditional branch */
	      || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
	  else
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "misc ctrl", dsc);
	}
      else
	{
	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
	    {
	      int dp_op = bits (insn1, 4, 8);
	      int rn = bits (insn1, 0, 3);
	      /* ADR and ADDW/SUBW with Rn == PC need a rewritten copy;
		 other plain-binary-immediate forms are PC-safe.  */
	      if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
						    regs, dsc);
	      else
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "dp/pb", dsc);
	    }
	  else /* Data processing (modified immediate) */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "dp/mi", dsc);
	}
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
	{
	case 0:
	  if (bit (insn1, 4))
	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
						   regs, dsc);
	  else /* NEON Load/Store and Store single data item */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "neon elt/struct load/store",
					       dsc);
	  break;
	case 1: /* op1 = 3, bits (9, 10) == 1 */
	  switch (bits (insn1, 7, 8))
	    {
	    case 0: case 1: /* Data processing (register) */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "dp(reg)", dsc);
	      break;
	    case 2: /* Multiply and absolute difference */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "mul/mua/diff", dsc);
	      break;
	    case 3: /* Long multiply and divide */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "lmul/lmua", dsc);
	      break;
	    }
	  break;
	default: /* Coprocessor instructions */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	  break;
	}
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));

}
7444
7445 static void
7446 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7447 struct regcache *regs,
7448 arm_displaced_step_closure *dsc)
7449 {
7450 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7451 uint16_t insn1
7452 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7453
7454 if (debug_displaced)
7455 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7456 "at %.8lx\n", insn1, (unsigned long) from);
7457
7458 dsc->is_thumb = 1;
7459 dsc->insn_size = thumb_insn_size (insn1);
7460 if (thumb_insn_size (insn1) == 4)
7461 {
7462 uint16_t insn2
7463 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7464 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7465 }
7466 else
7467 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7468 }
7469
7470 void
7471 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7472 CORE_ADDR to, struct regcache *regs,
7473 arm_displaced_step_closure *dsc)
7474 {
7475 int err = 0;
7476 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7477 uint32_t insn;
7478
7479 /* Most displaced instructions use a 1-instruction scratch space, so set this
7480 here and override below if/when necessary. */
7481 dsc->numinsns = 1;
7482 dsc->insn_addr = from;
7483 dsc->scratch_base = to;
7484 dsc->cleanup = NULL;
7485 dsc->wrote_to_pc = 0;
7486
7487 if (!displaced_in_arm_mode (regs))
7488 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7489
7490 dsc->is_thumb = 0;
7491 dsc->insn_size = 4;
7492 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7493 if (debug_displaced)
7494 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7495 "at %.8lx\n", (unsigned long) insn,
7496 (unsigned long) from);
7497
7498 if ((insn & 0xf0000000) == 0xf0000000)
7499 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7500 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7501 {
7502 case 0x0: case 0x1: case 0x2: case 0x3:
7503 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7504 break;
7505
7506 case 0x4: case 0x5: case 0x6:
7507 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7508 break;
7509
7510 case 0x7:
7511 err = arm_decode_media (gdbarch, insn, dsc);
7512 break;
7513
7514 case 0x8: case 0x9: case 0xa: case 0xb:
7515 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7516 break;
7517
7518 case 0xc: case 0xd: case 0xe: case 0xf:
7519 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7520 break;
7521 }
7522
7523 if (err)
7524 internal_error (__FILE__, __LINE__,
7525 _("arm_process_displaced_insn: Instruction decode error"));
7526 }
7527
7528 /* Actually set up the scratch space for a displaced instruction. */
7529
7530 void
7531 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7532 CORE_ADDR to, arm_displaced_step_closure *dsc)
7533 {
7534 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7535 unsigned int i, len, offset;
7536 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7537 int size = dsc->is_thumb? 2 : 4;
7538 const gdb_byte *bkp_insn;
7539
7540 offset = 0;
7541 /* Poke modified instruction(s). */
7542 for (i = 0; i < dsc->numinsns; i++)
7543 {
7544 if (debug_displaced)
7545 {
7546 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7547 if (size == 4)
7548 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7549 dsc->modinsn[i]);
7550 else if (size == 2)
7551 fprintf_unfiltered (gdb_stdlog, "%.4x",
7552 (unsigned short)dsc->modinsn[i]);
7553
7554 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7555 (unsigned long) to + offset);
7556
7557 }
7558 write_memory_unsigned_integer (to + offset, size,
7559 byte_order_for_code,
7560 dsc->modinsn[i]);
7561 offset += size;
7562 }
7563
7564 /* Choose the correct breakpoint instruction. */
7565 if (dsc->is_thumb)
7566 {
7567 bkp_insn = tdep->thumb_breakpoint;
7568 len = tdep->thumb_breakpoint_size;
7569 }
7570 else
7571 {
7572 bkp_insn = tdep->arm_breakpoint;
7573 len = tdep->arm_breakpoint_size;
7574 }
7575
7576 /* Put breakpoint afterwards. */
7577 write_memory (to + offset, bkp_insn, len);
7578
7579 if (debug_displaced)
7580 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7581 paddress (gdbarch, from), paddress (gdbarch, to));
7582 }
7583
7584 /* Entry point for cleaning things up after a displaced instruction has been
7585 single-stepped. */
7586
7587 void
7588 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7589 struct displaced_step_closure *dsc_,
7590 CORE_ADDR from, CORE_ADDR to,
7591 struct regcache *regs)
7592 {
7593 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7594
7595 if (dsc->cleanup)
7596 dsc->cleanup (gdbarch, regs, dsc);
7597
7598 if (!dsc->wrote_to_pc)
7599 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7600 dsc->insn_addr + dsc->insn_size);
7601
7602 }
7603
7604 #include "bfd-in2.h"
7605 #include "libcoff.h"
7606
/* Disassembler callback: print the instruction at MEMADDR via INFO.
   For Thumb addresses, plant a fake Thumb COFF symbol in INFO->symbols
   so that the opcodes disassembler decodes Thumb rather than ARM.  */

static int
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
{
  gdb_disassembler *di
    = static_cast<gdb_disassembler *>(info->application_data);
  struct gdbarch *gdbarch = di->arch ();

  if (arm_pc_is_thumb (gdbarch, memaddr))
    {
      /* Built lazily on first use and reused across calls.  */
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;

      if (csym.native == NULL)
	{
	  /* Create a fake symbol vector containing a Thumb symbol.
	     This is solely so that the code in print_insn_little_arm()
	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
	     the presence of a Thumb symbol and switch to decoding
	     Thumb instructions.  */

	  fake_target.flavour = bfd_target_coff_flavour;
	  fake_bfd.xvec = &fake_target;
	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
	  csym.native = &ce;
	  csym.symbol.the_bfd = &fake_bfd;
	  csym.symbol.name = "fake";
	  asym = (asymbol *) & csym;
	}

      /* Strip the Thumb bit before handing the address to opcodes.  */
      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    }
  else
    info->symbols = NULL;

  /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
     accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit.  Otherwise,
     opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
     the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
     in default_print_insn.  */
  if (exec_bfd != NULL)
    info->flags |= USER_SPECIFIED_MACHINE_TYPE;

  return default_print_insn (memaddr, info);
}
7655
7656 /* The following define instruction sequences that will cause ARM
7657 cpu's to take an undefined instruction trap. These are used to
7658 signal a breakpoint to GDB.
7659
7660 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7661 modes. A different instruction is required for each mode. The ARM
7662 cpu's can also be big or little endian. Thus four different
7663 instructions are needed to support all cases.
7664
7665 Note: ARMv4 defines several new instructions that will take the
7666 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7667 not in fact add the new instructions. The new undefined
7668 instructions in ARMv4 are all instructions that had no defined
7669 behaviour in earlier chips. There is no guarantee that they will
7670 raise an exception, but may be treated as NOP's. In practice, it
7671 may only safe to rely on instructions matching:
7672
7673 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7674 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7675 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7676
7677 Even this may only true if the condition predicate is true. The
7678 following use a condition predicate of ALWAYS so it is always TRUE.
7679
7680 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7681 and NetBSD all use a software interrupt rather than an undefined
   instruction to force a trap.  This can be handled by the
7683 abi-specific code during establishment of the gdbarch vector. */
7684
/* ARM-mode breakpoint pattern (an undefined instruction, per the
   comment above), in little-endian and big-endian byte order.  */
#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
/* Thumb-mode breakpoint pattern; both bytes are identical, so the
   little- and big-endian forms coincide.  */
#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
#define THUMB_BE_BREAKPOINT {0xbe,0xbe}

/* Default breakpoint sequences; OS/ABI-specific code may install
   different ones (see the comment above).  */
static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7694
7695 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7696
7697 static int
7698 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7699 {
7700 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7701 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7702
7703 if (arm_pc_is_thumb (gdbarch, *pcptr))
7704 {
7705 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7706
7707 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7708 check whether we are replacing a 32-bit instruction. */
7709 if (tdep->thumb2_breakpoint != NULL)
7710 {
7711 gdb_byte buf[2];
7712
7713 if (target_read_memory (*pcptr, buf, 2) == 0)
7714 {
7715 unsigned short inst1;
7716
7717 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7718 if (thumb_insn_size (inst1) == 4)
7719 return ARM_BP_KIND_THUMB2;
7720 }
7721 }
7722
7723 return ARM_BP_KIND_THUMB;
7724 }
7725 else
7726 return ARM_BP_KIND_ARM;
7727
7728 }
7729
7730 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7731
7732 static const gdb_byte *
7733 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7734 {
7735 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7736
7737 switch (kind)
7738 {
7739 case ARM_BP_KIND_ARM:
7740 *size = tdep->arm_breakpoint_size;
7741 return tdep->arm_breakpoint;
7742 case ARM_BP_KIND_THUMB:
7743 *size = tdep->thumb_breakpoint_size;
7744 return tdep->thumb_breakpoint;
7745 case ARM_BP_KIND_THUMB2:
7746 *size = tdep->thumb2_breakpoint_size;
7747 return tdep->thumb2_breakpoint;
7748 default:
7749 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7750 }
7751 }
7752
7753 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7754
7755 static int
7756 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7757 struct regcache *regcache,
7758 CORE_ADDR *pcptr)
7759 {
7760 gdb_byte buf[4];
7761
7762 /* Check the memory pointed by PC is readable. */
7763 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7764 {
7765 struct arm_get_next_pcs next_pcs_ctx;
7766
7767 arm_get_next_pcs_ctor (&next_pcs_ctx,
7768 &arm_get_next_pcs_ops,
7769 gdbarch_byte_order (gdbarch),
7770 gdbarch_byte_order_for_code (gdbarch),
7771 0,
7772 regcache);
7773
7774 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7775
7776 /* If MEMADDR is the next instruction of current pc, do the
7777 software single step computation, and get the thumb mode by
7778 the destination address. */
7779 for (CORE_ADDR pc : next_pcs)
7780 {
7781 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7782 {
7783 if (IS_THUMB_ADDR (pc))
7784 {
7785 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7786 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7787 }
7788 else
7789 return ARM_BP_KIND_ARM;
7790 }
7791 }
7792 }
7793
7794 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7795 }
7796
/* Extract from REGS a function return value of type TYPE, and copy
   that, in virtual format, into VALBUF.  */

static void
arm_extract_return_value (struct type *type, struct regcache *regs,
			  gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE_FLT == TYPE_CODE (type))
    {
      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  {
	    /* The value is in register F0 in internal format.  We need to
	       extract the raw value and then convert it to the desired
	       internal type.  */
	    bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];

	    regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
	    target_float_convert (tmpbuf, arm_ext_type (gdbarch),
				  valbuf, type);
	  }
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Soft-float (or variadic VFP) returns in the core registers
	     r0 (and r1 for values wider than one word).  */
	  regs->cooked_read (ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regs->cooked_read (ARM_A1_REGNUM + 1,
			       valbuf + ARM_INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_extract_return_value: "
			    "Floating point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      /* If the type is a plain integer, then the access is
	 straight-forward.  Otherwise we have to play around a bit
	 more.  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      ULONGEST tmp;

      while (len > 0)
	{
	  /* By using store_unsigned_integer we avoid having to do
	     anything special for small big-endian values.  */
	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
	  store_unsigned_integer (valbuf,
				  (len > ARM_INT_REGISTER_SIZE
				   ? ARM_INT_REGISTER_SIZE : len),
				  byte_order, tmp);
	  len -= ARM_INT_REGISTER_SIZE;
	  valbuf += ARM_INT_REGISTER_SIZE;
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];

      while (len > 0)
	{
	  regs->cooked_read (regno++, tmpbuf);
	  memcpy (valbuf, tmpbuf,
		  len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
	  len -= ARM_INT_REGISTER_SIZE;
	  valbuf += ARM_INT_REGISTER_SIZE;
	}
    }
}
7889
7890
7891 /* Will a function return an aggregate type in memory or in a
7892 register? Return 0 if an aggregate type can be returned in a
7893 register, 1 if it must be returned in memory. */
7894
7895 static int
7896 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7897 {
7898 enum type_code code;
7899
7900 type = check_typedef (type);
7901
7902 /* Simple, non-aggregate types (ie not including vectors and
7903 complex) are always returned in a register (or registers). */
7904 code = TYPE_CODE (type);
7905 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7906 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7907 return 0;
7908
7909 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7910 {
7911 /* Vector values should be returned using ARM registers if they
7912 are not over 16 bytes. */
7913 return (TYPE_LENGTH (type) > 16);
7914 }
7915
7916 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7917 {
7918 /* The AAPCS says all aggregates not larger than a word are returned
7919 in a register. */
7920 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
7921 return 0;
7922
7923 return 1;
7924 }
7925 else
7926 {
7927 int nRc;
7928
7929 /* All aggregate types that won't fit in a register must be returned
7930 in memory. */
7931 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
7932 return 1;
7933
7934 /* In the ARM ABI, "integer" like aggregate types are returned in
7935 registers. For an aggregate type to be integer like, its size
7936 must be less than or equal to ARM_INT_REGISTER_SIZE and the
7937 offset of each addressable subfield must be zero. Note that bit
7938 fields are not addressable, and all addressable subfields of
7939 unions always start at offset zero.
7940
7941 This function is based on the behaviour of GCC 2.95.1.
7942 See: gcc/arm.c: arm_return_in_memory() for details.
7943
7944 Note: All versions of GCC before GCC 2.95.2 do not set up the
7945 parameters correctly for a function returning the following
7946 structure: struct { float f;}; This should be returned in memory,
7947 not a register. Richard Earnshaw sent me a patch, but I do not
7948 know of any way to detect if a function like the above has been
7949 compiled with the correct calling convention. */
7950
7951 /* Assume all other aggregate types can be returned in a register.
7952 Run a check for structures, unions and arrays. */
7953 nRc = 0;
7954
7955 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7956 {
7957 int i;
7958 /* Need to check if this struct/union is "integer" like. For
7959 this to be true, its size must be less than or equal to
7960 ARM_INT_REGISTER_SIZE and the offset of each addressable
7961 subfield must be zero. Note that bit fields are not
7962 addressable, and unions always start at offset zero. If any
7963 of the subfields is a floating point type, the struct/union
7964 cannot be an integer type. */
7965
7966 /* For each field in the object, check:
7967 1) Is it FP? --> yes, nRc = 1;
7968 2) Is it addressable (bitpos != 0) and
7969 not packed (bitsize == 0)?
7970 --> yes, nRc = 1
7971 */
7972
7973 for (i = 0; i < TYPE_NFIELDS (type); i++)
7974 {
7975 enum type_code field_type_code;
7976
7977 field_type_code
7978 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7979 i)));
7980
7981 /* Is it a floating point type field? */
7982 if (field_type_code == TYPE_CODE_FLT)
7983 {
7984 nRc = 1;
7985 break;
7986 }
7987
7988 /* If bitpos != 0, then we have to care about it. */
7989 if (TYPE_FIELD_BITPOS (type, i) != 0)
7990 {
7991 /* Bitfields are not addressable. If the field bitsize is
7992 zero, then the field is not packed. Hence it cannot be
7993 a bitfield or any other packed type. */
7994 if (TYPE_FIELD_BITSIZE (type, i) == 0)
7995 {
7996 nRc = 1;
7997 break;
7998 }
7999 }
8000 }
8001 }
8002
8003 return nRc;
8004 }
8005 }
8006
/* Write into appropriate registers of REGS a function return value of
   type TYPE, given in virtual format in VALBUF.  */

static void
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE (type) == TYPE_CODE_FLT)
    {
      gdb_byte buf[ARM_FP_REGISTER_SIZE];

      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  /* Convert to the FPA internal format and place in F0.  */
	  target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
	  regs->cooked_write (ARM_F0_REGNUM, buf);
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Soft-float (or variadic VFP) returns in the core registers
	     r0 (and r1 for values wider than one word).  */
	  regs->cooked_write (ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regs->cooked_write (ARM_A1_REGNUM + 1,
				valbuf + ARM_INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
	{
	  /* Values of one word or less are zero/sign-extended and
	     returned in r0.  */
	  bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);

	  store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
	  regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
	}
      else
	{
	  /* Integral values greater than one word are stored in consecutive
	     registers starting with r0.  This will always be a multiple of
	     the register size.  */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;

	  while (len > 0)
	    {
	      regs->cooked_write (regno++, valbuf);
	      len -= ARM_INT_REGISTER_SIZE;
	      valbuf += ARM_INT_REGISTER_SIZE;
	    }
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];

      while (len > 0)
	{
	  memcpy (tmpbuf, valbuf,
		  len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
	  regs->cooked_write (regno++, tmpbuf);
	  len -= ARM_INT_REGISTER_SIZE;
	  valbuf += ARM_INT_REGISTER_SIZE;
	}
    }
}
8099
8100
/* Handle function return values: implement the gdbarch return_value
   method.  Decide how a value of VALTYPE is returned from FUNCTION,
   and read (into READBUF) or write (from WRITEBUF) the value in
   REGCACHE accordingly.  */

static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
		  struct type *valtype, struct regcache *regcache,
		  gdb_byte *readbuf, const gdb_byte *writebuf)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  int vfp_base_count;

  /* VFP hard-float candidates are returned in VFP registers.  */
  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
    {
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      int i;
      for (i = 0; i < vfp_base_count; i++)
	{
	  if (reg_char == 'q')
	    {
	      /* Quad registers need element-order handling.  */
	      if (writebuf)
		arm_neon_quad_write (gdbarch, regcache, i,
				     writebuf + i * unit_length);

	      if (readbuf)
		arm_neon_quad_read (gdbarch, regcache, i,
				    readbuf + i * unit_length);
	    }
	  else
	    {
	      /* Look up the register (e.g. "s0", "d1") by name.  */
	      char name_buf[4];
	      int regnum;

	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						    strlen (name_buf));
	      if (writebuf)
		regcache->cooked_write (regnum, writebuf + i * unit_length);
	      if (readbuf)
		regcache->cooked_read (regnum, readbuf + i * unit_length);
	    }
	}
      return RETURN_VALUE_REGISTER_CONVENTION;
    }

  /* Aggregates may have to be returned in memory, depending on the
     ABI and the type's layout.  */
  if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
      || TYPE_CODE (valtype) == TYPE_CODE_UNION
      || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
    {
      if (tdep->struct_return == pcc_struct_return
	  || arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }
  else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
    {
      if (arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }

  if (writebuf)
    arm_store_return_value (valtype, regcache, writebuf);

  if (readbuf)
    arm_extract_return_value (valtype, regcache, readbuf);

  return RETURN_VALUE_REGISTER_CONVENTION;
}
8170
8171
8172 static int
8173 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8174 {
8175 struct gdbarch *gdbarch = get_frame_arch (frame);
8176 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8177 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8178 CORE_ADDR jb_addr;
8179 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8180
8181 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8182
8183 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8184 ARM_INT_REGISTER_SIZE))
8185 return 0;
8186
8187 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8188 return 1;
8189 }
8190 /* A call to cmse secure entry function "foo" at "a" is modified by
8191 GNU ld as "b".
8192 a) bl xxxx <foo>
8193
8194 <foo>
8195 xxxx:
8196
8197 b) bl yyyy <__acle_se_foo>
8198
8199 section .gnu.sgstubs:
8200 <foo>
8201 yyyy: sg // secure gateway
8202 b.w xxxx <__acle_se_foo> // original_branch_dest
8203
8204 <__acle_se_foo>
8205 xxxx:
8206
   When control is at "b", the pc contains "yyyy" (the sg address), which is a
8208 trampoline and does not exist in source code. This function returns the
8209 target pc "xxxx". For more details please refer to section 5.4
8210 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8211 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8212 document on www.developer.arm.com. */
8213
8214 static CORE_ADDR
8215 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8216 {
8217 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8218 char *target_name = (char *) alloca (target_len);
8219 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8220
8221 struct bound_minimal_symbol minsym
8222 = lookup_minimal_symbol (target_name, NULL, objfile);
8223
8224 if (minsym.minsym != nullptr)
8225 return BMSYMBOL_VALUE_ADDRESS (minsym);
8226
8227 return 0;
8228 }
8229
8230 /* Return true when SEC points to ".gnu.sgstubs" section. */
8231
8232 static bool
8233 arm_is_sgstubs_section (struct obj_section *sec)
8234 {
8235 return (sec != nullptr
8236 && sec->the_bfd_section != nullptr
8237 && sec->the_bfd_section->name != nullptr
8238 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8239 }
8240
/* Recognize GCC and GNU ld's trampolines.  If we are in a trampoline,
   return the target PC.  Otherwise return 0.  */

CORE_ADDR
arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
{
  const char *name;
  int namelen;
  CORE_ADDR start_addr;

  /* Find the starting address and name of the function containing the PC.  */
  if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
    {
      /* Trampoline 'bx reg' doesn't belong to any functions.  Do the
         check here.  */
      start_addr = arm_skip_bx_reg (frame, pc);
      if (start_addr != 0)
        return start_addr;

      return 0;
    }

  /* If PC is in a Thumb call or return stub, return the address of the
     target PC, which is in a register.  The thunk functions are called
     _call_via_xx, where x is the register name.  The possible names
     are r0-r9, sl, fp, ip, sp, and lr.  ARM RealView has similar
     functions, named __ARM_call_via_r[0-7].  */
  if (startswith (name, "_call_via_")
      || startswith (name, "__ARM_call_via_"))
    {
      /* Use the name suffix to determine which register contains the
         target PC.  */
      static const char *table[15] =
      {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
       "r8", "r9", "sl", "fp", "ip", "sp", "lr"
      };
      int regno;
      /* All names in TABLE are exactly two characters, so the suffix
         starts two characters from the end of NAME.  */
      int offset = strlen (name) - 2;

      for (regno = 0; regno <= 14; regno++)
        if (strcmp (&name[offset], table[regno]) == 0)
          return get_frame_register_unsigned (frame, regno);
    }

  /* GNU ld generates __foo_from_arm or __foo_from_thumb for
     non-interworking calls to foo.  We could decode the stubs
     to find the target but it's easier to use the symbol table.  */
  namelen = strlen (name);
  if (name[0] == '_' && name[1] == '_'
      && ((namelen > 2 + strlen ("_from_thumb")
           && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
          || (namelen > 2 + strlen ("_from_arm")
              && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
    {
      char *target_name;
      int target_len = namelen - 2;
      struct bound_minimal_symbol minsym;
      struct objfile *objfile;
      struct obj_section *sec;

      /* Strip the leading "__" and the "_from_thumb"/"_from_arm"
         suffix to recover the real function name.  */
      if (name[namelen - 1] == 'b')
        target_len -= strlen ("_from_thumb");
      else
        target_len -= strlen ("_from_arm");

      target_name = (char *) alloca (target_len + 1);
      memcpy (target_name, name + 2, target_len);
      target_name[target_len] = '\0';

      /* Restrict the symbol lookup to the objfile containing PC.  */
      sec = find_pc_section (pc);
      objfile = (sec == NULL) ? NULL : sec->objfile;
      minsym = lookup_minimal_symbol (target_name, NULL, objfile);
      if (minsym.minsym != NULL)
        return BMSYMBOL_VALUE_ADDRESS (minsym);
      else
        return 0;
    }

  struct obj_section *section = find_pc_section (pc);

  /* Check whether SECTION points to the ".gnu.sgstubs" section.  */
  if (arm_is_sgstubs_section (section))
    return arm_skip_cmse_entry (pc, name, section->objfile);

  return 0;                     /* not a stub */
}
8327
8328 static void
8329 arm_update_current_architecture (void)
8330 {
8331 struct gdbarch_info info;
8332
8333 /* If the current architecture is not ARM, we have nothing to do. */
8334 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8335 return;
8336
8337 /* Update the architecture. */
8338 gdbarch_info_init (&info);
8339
8340 if (!gdbarch_update_p (info))
8341 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8342 }
8343
8344 static void
8345 set_fp_model_sfunc (const char *args, int from_tty,
8346 struct cmd_list_element *c)
8347 {
8348 int fp_model;
8349
8350 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8351 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8352 {
8353 arm_fp_model = (enum arm_float_model) fp_model;
8354 break;
8355 }
8356
8357 if (fp_model == ARM_FLOAT_LAST)
8358 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8359 current_fp_model);
8360
8361 arm_update_current_architecture ();
8362 }
8363
8364 static void
8365 show_fp_model (struct ui_file *file, int from_tty,
8366 struct cmd_list_element *c, const char *value)
8367 {
8368 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8369
8370 if (arm_fp_model == ARM_FLOAT_AUTO
8371 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8372 fprintf_filtered (file, _("\
8373 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8374 fp_model_strings[tdep->fp_model]);
8375 else
8376 fprintf_filtered (file, _("\
8377 The current ARM floating point model is \"%s\".\n"),
8378 fp_model_strings[arm_fp_model]);
8379 }
8380
8381 static void
8382 arm_set_abi (const char *args, int from_tty,
8383 struct cmd_list_element *c)
8384 {
8385 int arm_abi;
8386
8387 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8388 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8389 {
8390 arm_abi_global = (enum arm_abi_kind) arm_abi;
8391 break;
8392 }
8393
8394 if (arm_abi == ARM_ABI_LAST)
8395 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8396 arm_abi_string);
8397
8398 arm_update_current_architecture ();
8399 }
8400
8401 static void
8402 arm_show_abi (struct ui_file *file, int from_tty,
8403 struct cmd_list_element *c, const char *value)
8404 {
8405 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8406
8407 if (arm_abi_global == ARM_ABI_AUTO
8408 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8409 fprintf_filtered (file, _("\
8410 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8411 arm_abi_strings[tdep->arm_abi]);
8412 else
8413 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8414 arm_abi_string);
8415 }
8416
8417 static void
8418 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8419 struct cmd_list_element *c, const char *value)
8420 {
8421 fprintf_filtered (file,
8422 _("The current execution mode assumed "
8423 "(when symbols are unavailable) is \"%s\".\n"),
8424 arm_fallback_mode_string);
8425 }
8426
8427 static void
8428 arm_show_force_mode (struct ui_file *file, int from_tty,
8429 struct cmd_list_element *c, const char *value)
8430 {
8431 fprintf_filtered (file,
8432 _("The current execution mode assumed "
8433 "(even when symbols are available) is \"%s\".\n"),
8434 arm_force_mode_string);
8435 }
8436
8437 /* If the user changes the register disassembly style used for info
8438 register and other commands, we have to also switch the style used
8439 in opcodes for disassembly output. This function is run in the "set
8440 arm disassembly" command, and does that. */
8441
8442 static void
8443 set_disassembly_style_sfunc (const char *args, int from_tty,
8444 struct cmd_list_element *c)
8445 {
8446 /* Convert the short style name into the long style name (eg, reg-names-*)
8447 before calling the generic set_disassembler_options() function. */
8448 std::string long_name = std::string ("reg-names-") + disassembly_style;
8449 set_disassembler_options (&long_name[0]);
8450 }
8451
8452 static void
8453 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8454 struct cmd_list_element *c, const char *value)
8455 {
8456 struct gdbarch *gdbarch = get_current_arch ();
8457 char *options = get_disassembler_options (gdbarch);
8458 const char *style = "";
8459 int len = 0;
8460 const char *opt;
8461
8462 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8463 if (CONST_STRNEQ (opt, "reg-names-"))
8464 {
8465 style = &opt[strlen ("reg-names-")];
8466 len = strcspn (style, ",");
8467 }
8468
8469 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8470 }
8471 \f
8472 /* Return the ARM register name corresponding to register I. */
8473 static const char *
8474 arm_register_name (struct gdbarch *gdbarch, int i)
8475 {
8476 const int num_regs = gdbarch_num_regs (gdbarch);
8477
8478 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8479 && i >= num_regs && i < num_regs + 32)
8480 {
8481 static const char *const vfp_pseudo_names[] = {
8482 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8483 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8484 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8485 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8486 };
8487
8488 return vfp_pseudo_names[i - num_regs];
8489 }
8490
8491 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8492 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8493 {
8494 static const char *const neon_pseudo_names[] = {
8495 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8496 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8497 };
8498
8499 return neon_pseudo_names[i - num_regs - 32];
8500 }
8501
8502 if (i >= ARRAY_SIZE (arm_register_names))
8503 /* These registers are only supported on targets which supply
8504 an XML description. */
8505 return "";
8506
8507 return arm_register_names[i];
8508 }
8509
8510 /* Test whether the coff symbol specific value corresponds to a Thumb
8511 function. */
8512
8513 static int
8514 coff_sym_is_thumb (int val)
8515 {
8516 return (val == C_THUMBEXT
8517 || val == C_THUMBSTAT
8518 || val == C_THUMBEXTFUNC
8519 || val == C_THUMBSTATFUNC
8520 || val == C_THUMBLABEL);
8521 }
8522
8523 /* arm_coff_make_msymbol_special()
8524 arm_elf_make_msymbol_special()
8525
8526 These functions test whether the COFF or ELF symbol corresponds to
8527 an address in thumb code, and set a "special" bit in a minimal
8528 symbol to indicate that it does. */
8529
8530 static void
8531 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8532 {
8533 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8534
8535 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8536 == ST_BRANCH_TO_THUMB)
8537 MSYMBOL_SET_SPECIAL (msym);
8538 }
8539
8540 static void
8541 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8542 {
8543 if (coff_sym_is_thumb (val))
8544 MSYMBOL_SET_SPECIAL (msym);
8545 }
8546
/* Record the ARM mapping symbol SYM ($a, $t or $d) from OBJFILE in
   the per-BFD section maps, keyed by the symbol's section index.  */

static void
arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
                           asymbol *sym)
{
  const char *name = bfd_asymbol_name (sym);
  struct arm_per_bfd *data;
  struct arm_mapping_symbol new_map_sym;

  /* Only "$a" (ARM code), "$t" (Thumb code) and "$d" (data) symbols
     are recorded; any other "$"-prefixed symbol is ignored.  */
  gdb_assert (name[0] == '$');
  if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
    return;

  /* Attach the per-BFD data (one map per section) the first time a
     mapping symbol is seen for this BFD.  */
  data = arm_bfd_data_key.get (objfile->obfd);
  if (data == NULL)
    data = arm_bfd_data_key.emplace (objfile->obfd,
                                     objfile->obfd->section_count);
  arm_mapping_symbol_vec &map
    = data->section_maps[bfd_asymbol_section (sym)->index];

  /* A mapping symbol is (section offset, type character).  */
  new_map_sym.value = sym->value;
  new_map_sym.type = name[1];

  /* Insert at the end, the vector will be sorted on first use.  */
  map.push_back (new_map_sym);
}
8572
8573 static void
8574 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8575 {
8576 struct gdbarch *gdbarch = regcache->arch ();
8577 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8578
8579 /* If necessary, set the T bit. */
8580 if (arm_apcs_32)
8581 {
8582 ULONGEST val, t_bit;
8583 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8584 t_bit = arm_psr_thumb_bit (gdbarch);
8585 if (arm_pc_is_thumb (gdbarch, pc))
8586 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8587 val | t_bit);
8588 else
8589 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8590 val & ~t_bit);
8591 }
8592 }
8593
8594 /* Read the contents of a NEON quad register, by reading from two
8595 double registers. This is used to implement the quad pseudo
8596 registers, and for argument passing in case the quad registers are
8597 missing; vectors are passed in quad registers when using the VFP
8598 ABI, even if a NEON unit is not present. REGNUM is the index of
8599 the quad register, in [0, 15]. */
8600
8601 static enum register_status
8602 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8603 int regnum, gdb_byte *buf)
8604 {
8605 char name_buf[4];
8606 gdb_byte reg_buf[8];
8607 int offset, double_regnum;
8608 enum register_status status;
8609
8610 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8611 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8612 strlen (name_buf));
8613
8614 /* d0 is always the least significant half of q0. */
8615 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8616 offset = 8;
8617 else
8618 offset = 0;
8619
8620 status = regcache->raw_read (double_regnum, reg_buf);
8621 if (status != REG_VALID)
8622 return status;
8623 memcpy (buf + offset, reg_buf, 8);
8624
8625 offset = 8 - offset;
8626 status = regcache->raw_read (double_regnum + 1, reg_buf);
8627 if (status != REG_VALID)
8628 return status;
8629 memcpy (buf + offset, reg_buf, 8);
8630
8631 return REG_VALID;
8632 }
8633
/* Read pseudo register REGNUM into BUF.  The single-precision pseudos
   s0..s31 come first after the raw registers, followed (when NEON
   pseudos are present) by the quads q0..q15.  */

static enum register_status
arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
                 int regnum, gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  /* Work with a number relative to the first pseudo register.  */
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
  else
    {
      enum register_status status;

      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
        offset = (regnum & 1) ? 0 : 4;
      else
        offset = (regnum & 1) ? 4 : 0;

      /* s<2N> and s<2N+1> are the two halves of d<N>: read the whole
         double register and copy out the requested word.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
                                                   strlen (name_buf));

      status = regcache->raw_read (double_regnum, reg_buf);
      if (status == REG_VALID)
        memcpy (buf, reg_buf + offset, 4);
      return status;
    }
}
8672
8673 /* Store the contents of BUF to a NEON quad register, by writing to
8674 two double registers. This is used to implement the quad pseudo
8675 registers, and for argument passing in case the quad registers are
8676 missing; vectors are passed in quad registers when using the VFP
8677 ABI, even if a NEON unit is not present. REGNUM is the index
8678 of the quad register, in [0, 15]. */
8679
8680 static void
8681 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8682 int regnum, const gdb_byte *buf)
8683 {
8684 char name_buf[4];
8685 int offset, double_regnum;
8686
8687 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8688 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8689 strlen (name_buf));
8690
8691 /* d0 is always the least significant half of q0. */
8692 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8693 offset = 8;
8694 else
8695 offset = 0;
8696
8697 regcache->raw_write (double_regnum, buf + offset);
8698 offset = 8 - offset;
8699 regcache->raw_write (double_regnum + 1, buf + offset);
8700 }
8701
/* Write BUF to pseudo register REGNUM.  The single-precision pseudos
   s0..s31 come first after the raw registers, followed (when NEON
   pseudos are present) by the quads q0..q15.  */

static void
arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
                  int regnum, const gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  /* Work with a number relative to the first pseudo register.  */
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
  else
    {
      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
        offset = (regnum & 1) ? 0 : 4;
      else
        offset = (regnum & 1) ? 4 : 0;

      /* s<2N> and s<2N+1> are the two halves of d<N>: read-modify-write
         the enclosing double register.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
                                                   strlen (name_buf));

      /* NOTE(review): the status of this raw_read is not checked; if
         the read fails, the untouched half of REG_BUF is undefined
         when written back below.  */
      regcache->raw_read (double_regnum, reg_buf);
      memcpy (reg_buf + offset, buf, 4);
      regcache->raw_write (double_regnum, reg_buf);
    }
}
8737
8738 static struct value *
8739 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8740 {
8741 const int *reg_p = (const int *) baton;
8742 return value_of_register (*reg_p, frame);
8743 }
8744 \f
8745 static enum gdb_osabi
8746 arm_elf_osabi_sniffer (bfd *abfd)
8747 {
8748 unsigned int elfosabi;
8749 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8750
8751 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8752
8753 if (elfosabi == ELFOSABI_ARM)
8754 /* GNU tools use this value. Check note sections in this case,
8755 as well. */
8756 bfd_map_over_sections (abfd,
8757 generic_elf_osabi_sniff_abi_tag_sections,
8758 &osabi);
8759
8760 /* Anything else will be handled by the generic ELF sniffer. */
8761 return osabi;
8762 }
8763
8764 static int
8765 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8766 struct reggroup *group)
8767 {
8768 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8769 this, FPS register belongs to save_regroup, restore_reggroup, and
8770 all_reggroup, of course. */
8771 if (regnum == ARM_FPS_REGNUM)
8772 return (group == float_reggroup
8773 || group == save_reggroup
8774 || group == restore_reggroup
8775 || group == all_reggroup);
8776 else
8777 return default_register_reggroup_p (gdbarch, regnum, group);
8778 }
8779
8780 /* For backward-compatibility we allow two 'g' packet lengths with
8781 the remote protocol depending on whether FPA registers are
8782 supplied. M-profile targets do not have FPA registers, but some
8783 stubs already exist in the wild which use a 'g' packet which
8784 supplies them albeit with dummy values. The packet format which
8785 includes FPA registers should be considered deprecated for
8786 M-profile targets. */
8787
8788 static void
8789 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8790 {
8791 if (gdbarch_tdep (gdbarch)->is_m)
8792 {
8793 const target_desc *tdesc;
8794
8795 /* If we know from the executable this is an M-profile target,
8796 cater for remote targets whose register set layout is the
8797 same as the FPA layout. */
8798 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8799 register_remote_g_packet_guess (gdbarch,
8800 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8801 tdesc);
8802
8803 /* The regular M-profile layout. */
8804 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
8805 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
8806 tdesc);
8807
8808 /* M-profile plus M4F VFP. */
8809 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
8810 register_remote_g_packet_guess (gdbarch,
8811 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
8812 tdesc);
8813 }
8814
8815 /* Otherwise we don't have a useful guess. */
8816 }
8817
8818 /* Implement the code_of_frame_writable gdbarch method. */
8819
8820 static int
8821 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8822 {
8823 if (gdbarch_tdep (gdbarch)->is_m
8824 && get_frame_type (frame) == SIGTRAMP_FRAME)
8825 {
8826 /* M-profile exception frames return to some magic PCs, where
8827 isn't writable at all. */
8828 return 0;
8829 }
8830 else
8831 return 1;
8832 }
8833
8834 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8835 to be postfixed by a version (eg armv7hl). */
8836
8837 static const char *
8838 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8839 {
8840 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8841 return "arm(v[^- ]*)?";
8842 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8843 }
8844
8845 /* Initialize the current architecture based on INFO. If possible,
8846 re-use an architecture from ARCHES, which is a list of
8847 architectures already created during this debugging session.
8848
8849 Called e.g. at program startup, when reading a core file, and when
8850 reading a binary file. */
8851
8852 static struct gdbarch *
8853 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8854 {
8855 struct gdbarch_tdep *tdep;
8856 struct gdbarch *gdbarch;
8857 struct gdbarch_list *best_arch;
8858 enum arm_abi_kind arm_abi = arm_abi_global;
8859 enum arm_float_model fp_model = arm_fp_model;
8860 struct tdesc_arch_data *tdesc_data = NULL;
8861 int i;
8862 bool is_m = false;
8863 int vfp_register_count = 0;
8864 bool have_vfp_pseudos = false, have_neon_pseudos = false;
8865 bool have_wmmx_registers = false;
8866 bool have_neon = false;
8867 bool have_fpa_registers = true;
8868 const struct target_desc *tdesc = info.target_desc;
8869
8870 /* If we have an object to base this architecture on, try to determine
8871 its ABI. */
8872
8873 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8874 {
8875 int ei_osabi, e_flags;
8876
8877 switch (bfd_get_flavour (info.abfd))
8878 {
8879 case bfd_target_coff_flavour:
8880 /* Assume it's an old APCS-style ABI. */
8881 /* XXX WinCE? */
8882 arm_abi = ARM_ABI_APCS;
8883 break;
8884
8885 case bfd_target_elf_flavour:
8886 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8887 e_flags = elf_elfheader (info.abfd)->e_flags;
8888
8889 if (ei_osabi == ELFOSABI_ARM)
8890 {
8891 /* GNU tools used to use this value, but do not for EABI
8892 objects. There's nowhere to tag an EABI version
8893 anyway, so assume APCS. */
8894 arm_abi = ARM_ABI_APCS;
8895 }
8896 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8897 {
8898 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8899
8900 switch (eabi_ver)
8901 {
8902 case EF_ARM_EABI_UNKNOWN:
8903 /* Assume GNU tools. */
8904 arm_abi = ARM_ABI_APCS;
8905 break;
8906
8907 case EF_ARM_EABI_VER4:
8908 case EF_ARM_EABI_VER5:
8909 arm_abi = ARM_ABI_AAPCS;
8910 /* EABI binaries default to VFP float ordering.
8911 They may also contain build attributes that can
8912 be used to identify if the VFP argument-passing
8913 ABI is in use. */
8914 if (fp_model == ARM_FLOAT_AUTO)
8915 {
8916 #ifdef HAVE_ELF
8917 switch (bfd_elf_get_obj_attr_int (info.abfd,
8918 OBJ_ATTR_PROC,
8919 Tag_ABI_VFP_args))
8920 {
8921 case AEABI_VFP_args_base:
8922 /* "The user intended FP parameter/result
8923 passing to conform to AAPCS, base
8924 variant". */
8925 fp_model = ARM_FLOAT_SOFT_VFP;
8926 break;
8927 case AEABI_VFP_args_vfp:
8928 /* "The user intended FP parameter/result
8929 passing to conform to AAPCS, VFP
8930 variant". */
8931 fp_model = ARM_FLOAT_VFP;
8932 break;
8933 case AEABI_VFP_args_toolchain:
8934 /* "The user intended FP parameter/result
8935 passing to conform to tool chain-specific
8936 conventions" - we don't know any such
8937 conventions, so leave it as "auto". */
8938 break;
8939 case AEABI_VFP_args_compatible:
8940 /* "Code is compatible with both the base
8941 and VFP variants; the user did not permit
8942 non-variadic functions to pass FP
8943 parameters/results" - leave it as
8944 "auto". */
8945 break;
8946 default:
8947 /* Attribute value not mentioned in the
8948 November 2012 ABI, so leave it as
8949 "auto". */
8950 break;
8951 }
8952 #else
8953 fp_model = ARM_FLOAT_SOFT_VFP;
8954 #endif
8955 }
8956 break;
8957
8958 default:
8959 /* Leave it as "auto". */
8960 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8961 break;
8962 }
8963
8964 #ifdef HAVE_ELF
8965 /* Detect M-profile programs. This only works if the
8966 executable file includes build attributes; GCC does
8967 copy them to the executable, but e.g. RealView does
8968 not. */
8969 int attr_arch
8970 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8971 Tag_CPU_arch);
8972 int attr_profile
8973 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8974 Tag_CPU_arch_profile);
8975
8976 /* GCC specifies the profile for v6-M; RealView only
8977 specifies the profile for architectures starting with
8978 V7 (as opposed to architectures with a tag
8979 numerically greater than TAG_CPU_ARCH_V7). */
8980 if (!tdesc_has_registers (tdesc)
8981 && (attr_arch == TAG_CPU_ARCH_V6_M
8982 || attr_arch == TAG_CPU_ARCH_V6S_M
8983 || attr_profile == 'M'))
8984 is_m = true;
8985 #endif
8986 }
8987
8988 if (fp_model == ARM_FLOAT_AUTO)
8989 {
8990 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
8991 {
8992 case 0:
8993 /* Leave it as "auto". Strictly speaking this case
8994 means FPA, but almost nobody uses that now, and
8995 many toolchains fail to set the appropriate bits
8996 for the floating-point model they use. */
8997 break;
8998 case EF_ARM_SOFT_FLOAT:
8999 fp_model = ARM_FLOAT_SOFT_FPA;
9000 break;
9001 case EF_ARM_VFP_FLOAT:
9002 fp_model = ARM_FLOAT_VFP;
9003 break;
9004 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9005 fp_model = ARM_FLOAT_SOFT_VFP;
9006 break;
9007 }
9008 }
9009
9010 if (e_flags & EF_ARM_BE8)
9011 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9012
9013 break;
9014
9015 default:
9016 /* Leave it as "auto". */
9017 break;
9018 }
9019 }
9020
9021 /* Check any target description for validity. */
9022 if (tdesc_has_registers (tdesc))
9023 {
9024 /* For most registers we require GDB's default names; but also allow
9025 the numeric names for sp / lr / pc, as a convenience. */
9026 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9027 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9028 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9029
9030 const struct tdesc_feature *feature;
9031 int valid_p;
9032
9033 feature = tdesc_find_feature (tdesc,
9034 "org.gnu.gdb.arm.core");
9035 if (feature == NULL)
9036 {
9037 feature = tdesc_find_feature (tdesc,
9038 "org.gnu.gdb.arm.m-profile");
9039 if (feature == NULL)
9040 return NULL;
9041 else
9042 is_m = true;
9043 }
9044
9045 tdesc_data = tdesc_data_alloc ();
9046
9047 valid_p = 1;
9048 for (i = 0; i < ARM_SP_REGNUM; i++)
9049 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9050 arm_register_names[i]);
9051 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9052 ARM_SP_REGNUM,
9053 arm_sp_names);
9054 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9055 ARM_LR_REGNUM,
9056 arm_lr_names);
9057 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9058 ARM_PC_REGNUM,
9059 arm_pc_names);
9060 if (is_m)
9061 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9062 ARM_PS_REGNUM, "xpsr");
9063 else
9064 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9065 ARM_PS_REGNUM, "cpsr");
9066
9067 if (!valid_p)
9068 {
9069 tdesc_data_cleanup (tdesc_data);
9070 return NULL;
9071 }
9072
9073 feature = tdesc_find_feature (tdesc,
9074 "org.gnu.gdb.arm.fpa");
9075 if (feature != NULL)
9076 {
9077 valid_p = 1;
9078 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9079 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9080 arm_register_names[i]);
9081 if (!valid_p)
9082 {
9083 tdesc_data_cleanup (tdesc_data);
9084 return NULL;
9085 }
9086 }
9087 else
9088 have_fpa_registers = false;
9089
9090 feature = tdesc_find_feature (tdesc,
9091 "org.gnu.gdb.xscale.iwmmxt");
9092 if (feature != NULL)
9093 {
9094 static const char *const iwmmxt_names[] = {
9095 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9096 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9097 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9098 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9099 };
9100
9101 valid_p = 1;
9102 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9103 valid_p
9104 &= tdesc_numbered_register (feature, tdesc_data, i,
9105 iwmmxt_names[i - ARM_WR0_REGNUM]);
9106
9107 /* Check for the control registers, but do not fail if they
9108 are missing. */
9109 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9110 tdesc_numbered_register (feature, tdesc_data, i,
9111 iwmmxt_names[i - ARM_WR0_REGNUM]);
9112
9113 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9114 valid_p
9115 &= tdesc_numbered_register (feature, tdesc_data, i,
9116 iwmmxt_names[i - ARM_WR0_REGNUM]);
9117
9118 if (!valid_p)
9119 {
9120 tdesc_data_cleanup (tdesc_data);
9121 return NULL;
9122 }
9123
9124 have_wmmx_registers = true;
9125 }
9126
9127 /* If we have a VFP unit, check whether the single precision registers
9128 are present. If not, then we will synthesize them as pseudo
9129 registers. */
9130 feature = tdesc_find_feature (tdesc,
9131 "org.gnu.gdb.arm.vfp");
9132 if (feature != NULL)
9133 {
9134 static const char *const vfp_double_names[] = {
9135 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9136 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9137 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9138 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9139 };
9140
9141 /* Require the double precision registers. There must be either
9142 16 or 32. */
9143 valid_p = 1;
9144 for (i = 0; i < 32; i++)
9145 {
9146 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9147 ARM_D0_REGNUM + i,
9148 vfp_double_names[i]);
9149 if (!valid_p)
9150 break;
9151 }
9152 if (!valid_p && i == 16)
9153 valid_p = 1;
9154
9155 /* Also require FPSCR. */
9156 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9157 ARM_FPSCR_REGNUM, "fpscr");
9158 if (!valid_p)
9159 {
9160 tdesc_data_cleanup (tdesc_data);
9161 return NULL;
9162 }
9163
9164 if (tdesc_unnumbered_register (feature, "s0") == 0)
9165 have_vfp_pseudos = true;
9166
9167 vfp_register_count = i;
9168
9169 /* If we have VFP, also check for NEON. The architecture allows
9170 NEON without VFP (integer vector operations only), but GDB
9171 does not support that. */
9172 feature = tdesc_find_feature (tdesc,
9173 "org.gnu.gdb.arm.neon");
9174 if (feature != NULL)
9175 {
9176 /* NEON requires 32 double-precision registers. */
9177 if (i != 32)
9178 {
9179 tdesc_data_cleanup (tdesc_data);
9180 return NULL;
9181 }
9182
9183 /* If there are quad registers defined by the stub, use
9184 their type; otherwise (normally) provide them with
9185 the default type. */
9186 if (tdesc_unnumbered_register (feature, "q0") == 0)
9187 have_neon_pseudos = true;
9188
9189 have_neon = true;
9190 }
9191 }
9192 }
9193
9194 /* If there is already a candidate, use it. */
9195 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9196 best_arch != NULL;
9197 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9198 {
9199 if (arm_abi != ARM_ABI_AUTO
9200 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9201 continue;
9202
9203 if (fp_model != ARM_FLOAT_AUTO
9204 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9205 continue;
9206
9207 /* There are various other properties in tdep that we do not
9208 need to check here: those derived from a target description,
9209 since gdbarches with a different target description are
9210 automatically disqualified. */
9211
9212 /* Do check is_m, though, since it might come from the binary. */
9213 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9214 continue;
9215
9216 /* Found a match. */
9217 break;
9218 }
9219
9220 if (best_arch != NULL)
9221 {
9222 if (tdesc_data != NULL)
9223 tdesc_data_cleanup (tdesc_data);
9224 return best_arch->gdbarch;
9225 }
9226
9227 tdep = XCNEW (struct gdbarch_tdep);
9228 gdbarch = gdbarch_alloc (&info, tdep);
9229
9230 /* Record additional information about the architecture we are defining.
9231 These are gdbarch discriminators, like the OSABI. */
9232 tdep->arm_abi = arm_abi;
9233 tdep->fp_model = fp_model;
9234 tdep->is_m = is_m;
9235 tdep->have_fpa_registers = have_fpa_registers;
9236 tdep->have_wmmx_registers = have_wmmx_registers;
9237 gdb_assert (vfp_register_count == 0
9238 || vfp_register_count == 16
9239 || vfp_register_count == 32);
9240 tdep->vfp_register_count = vfp_register_count;
9241 tdep->have_vfp_pseudos = have_vfp_pseudos;
9242 tdep->have_neon_pseudos = have_neon_pseudos;
9243 tdep->have_neon = have_neon;
9244
9245 arm_register_g_packet_guesses (gdbarch);
9246
9247 /* Breakpoints. */
9248 switch (info.byte_order_for_code)
9249 {
9250 case BFD_ENDIAN_BIG:
9251 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9252 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9253 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9254 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9255
9256 break;
9257
9258 case BFD_ENDIAN_LITTLE:
9259 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9260 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9261 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9262 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9263
9264 break;
9265
9266 default:
9267 internal_error (__FILE__, __LINE__,
9268 _("arm_gdbarch_init: bad byte order for float format"));
9269 }
9270
9271 /* On ARM targets char defaults to unsigned. */
9272 set_gdbarch_char_signed (gdbarch, 0);
9273
9274 /* wchar_t is unsigned under the AAPCS. */
9275 if (tdep->arm_abi == ARM_ABI_AAPCS)
9276 set_gdbarch_wchar_signed (gdbarch, 0);
9277 else
9278 set_gdbarch_wchar_signed (gdbarch, 1);
9279
9280 /* Compute type alignment. */
9281 set_gdbarch_type_align (gdbarch, arm_type_align);
9282
9283 /* Note: for displaced stepping, this includes the breakpoint, and one word
9284 of additional scratch space. This setting isn't used for anything beside
9285 displaced stepping at present. */
9286 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9287
9288 /* This should be low enough for everything. */
9289 tdep->lowest_pc = 0x20;
9290 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9291
9292 /* The default, for both APCS and AAPCS, is to return small
9293 structures in registers. */
9294 tdep->struct_return = reg_struct_return;
9295
9296 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9297 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9298
9299 if (is_m)
9300 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9301
9302 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9303
9304 frame_base_set_default (gdbarch, &arm_normal_base);
9305
9306 /* Address manipulation. */
9307 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9308
9309 /* Advance PC across function entry code. */
9310 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9311
9312 /* Detect whether PC is at a point where the stack has been destroyed. */
9313 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9314
9315 /* Skip trampolines. */
9316 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9317
9318 /* The stack grows downward. */
9319 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9320
9321 /* Breakpoint manipulation. */
9322 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9323 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9324 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9325 arm_breakpoint_kind_from_current_state);
9326
9327 /* Information about registers, etc. */
9328 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9329 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9330 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9331 set_gdbarch_register_type (gdbarch, arm_register_type);
9332 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9333
9334 /* This "info float" is FPA-specific. Use the generic version if we
9335 do not have FPA. */
9336 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9337 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9338
9339 /* Internal <-> external register number maps. */
9340 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9341 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9342
9343 set_gdbarch_register_name (gdbarch, arm_register_name);
9344
9345 /* Returning results. */
9346 set_gdbarch_return_value (gdbarch, arm_return_value);
9347
9348 /* Disassembly. */
9349 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9350
9351 /* Minsymbol frobbing. */
9352 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9353 set_gdbarch_coff_make_msymbol_special (gdbarch,
9354 arm_coff_make_msymbol_special);
9355 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9356
9357 /* Thumb-2 IT block support. */
9358 set_gdbarch_adjust_breakpoint_address (gdbarch,
9359 arm_adjust_breakpoint_address);
9360
9361 /* Virtual tables. */
9362 set_gdbarch_vbit_in_delta (gdbarch, 1);
9363
9364 /* Hook in the ABI-specific overrides, if they have been registered. */
9365 gdbarch_init_osabi (info, gdbarch);
9366
9367 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9368
9369 /* Add some default predicates. */
9370 if (is_m)
9371 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9372 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9373 dwarf2_append_unwinders (gdbarch);
9374 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9375 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9376 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9377
9378 /* Now we have tuned the configuration, set a few final things,
9379 based on what the OS ABI has told us. */
9380
9381 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9382 binaries are always marked. */
9383 if (tdep->arm_abi == ARM_ABI_AUTO)
9384 tdep->arm_abi = ARM_ABI_APCS;
9385
9386 /* Watchpoints are not steppable. */
9387 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9388
9389 /* We used to default to FPA for generic ARM, but almost nobody
9390 uses that now, and we now provide a way for the user to force
9391 the model. So default to the most useful variant. */
9392 if (tdep->fp_model == ARM_FLOAT_AUTO)
9393 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9394
9395 if (tdep->jb_pc >= 0)
9396 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9397
9398 /* Floating point sizes and format. */
9399 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9400 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9401 {
9402 set_gdbarch_double_format
9403 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9404 set_gdbarch_long_double_format
9405 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9406 }
9407 else
9408 {
9409 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9410 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9411 }
9412
9413 if (have_vfp_pseudos)
9414 {
9415 /* NOTE: These are the only pseudo registers used by
9416 the ARM target at the moment. If more are added, a
9417 little more care in numbering will be needed. */
9418
9419 int num_pseudos = 32;
9420 if (have_neon_pseudos)
9421 num_pseudos += 16;
9422 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9423 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9424 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9425 }
9426
9427 if (tdesc_data)
9428 {
9429 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9430
9431 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9432
9433 /* Override tdesc_register_type to adjust the types of VFP
9434 registers for NEON. */
9435 set_gdbarch_register_type (gdbarch, arm_register_type);
9436 }
9437
9438 /* Add standard register aliases. We add aliases even for those
9439 names which are used by the current architecture - it's simpler,
9440 and does no harm, since nothing ever lists user registers. */
9441 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9442 user_reg_add (gdbarch, arm_register_aliases[i].name,
9443 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9444
9445 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9446 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9447
9448 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9449
9450 return gdbarch;
9451 }
9452
9453 static void
9454 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9455 {
9456 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9457
9458 if (tdep == NULL)
9459 return;
9460
9461 fprintf_unfiltered (file, _("arm_dump_tdep: fp_model = %i\n"),
9462 (int) tdep->fp_model);
9463 fprintf_unfiltered (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
9464 (int) tdep->have_fpa_registers);
9465 fprintf_unfiltered (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
9466 (int) tdep->have_wmmx_registers);
9467 fprintf_unfiltered (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
9468 (int) tdep->vfp_register_count);
9469 fprintf_unfiltered (file, _("arm_dump_tdep: have_vfp_pseudos = %i\n"),
9470 (int) tdep->have_vfp_pseudos);
9471 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon_pseudos = %i\n"),
9472 (int) tdep->have_neon_pseudos);
9473 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon = %i\n"),
9474 (int) tdep->have_neon);
9475 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
9476 (unsigned long) tdep->lowest_pc);
9477 }
9478
9479 #if GDB_SELF_TEST
9480 namespace selftests
9481 {
9482 static void arm_record_test (void);
9483 }
9484 #endif
9485
9486 void _initialize_arm_tdep ();
9487 void
9488 _initialize_arm_tdep ()
9489 {
9490 long length;
9491 int i, j;
9492 char regdesc[1024], *rdptr = regdesc;
9493 size_t rest = sizeof (regdesc);
9494
9495 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9496
9497 /* Add ourselves to objfile event chain. */
9498 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9499
9500 /* Register an ELF OS ABI sniffer for ARM binaries. */
9501 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9502 bfd_target_elf_flavour,
9503 arm_elf_osabi_sniffer);
9504
9505 /* Add root prefix command for all "set arm"/"show arm" commands. */
9506 add_basic_prefix_cmd ("arm", no_class,
9507 _("Various ARM-specific commands."),
9508 &setarmcmdlist, "set arm ", 0, &setlist);
9509
9510 add_show_prefix_cmd ("arm", no_class,
9511 _("Various ARM-specific commands."),
9512 &showarmcmdlist, "show arm ", 0, &showlist);
9513
9514
9515 arm_disassembler_options = xstrdup ("reg-names-std");
9516 const disasm_options_t *disasm_options
9517 = &disassembler_options_arm ()->options;
9518 int num_disassembly_styles = 0;
9519 for (i = 0; disasm_options->name[i] != NULL; i++)
9520 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9521 num_disassembly_styles++;
9522
9523 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9524 valid_disassembly_styles = XNEWVEC (const char *,
9525 num_disassembly_styles + 1);
9526 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9527 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9528 {
9529 size_t offset = strlen ("reg-names-");
9530 const char *style = disasm_options->name[i];
9531 valid_disassembly_styles[j++] = &style[offset];
9532 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9533 disasm_options->description[i]);
9534 rdptr += length;
9535 rest -= length;
9536 }
9537 /* Mark the end of valid options. */
9538 valid_disassembly_styles[num_disassembly_styles] = NULL;
9539
9540 /* Create the help text. */
9541 std::string helptext = string_printf ("%s%s%s",
9542 _("The valid values are:\n"),
9543 regdesc,
9544 _("The default is \"std\"."));
9545
9546 add_setshow_enum_cmd("disassembler", no_class,
9547 valid_disassembly_styles, &disassembly_style,
9548 _("Set the disassembly style."),
9549 _("Show the disassembly style."),
9550 helptext.c_str (),
9551 set_disassembly_style_sfunc,
9552 show_disassembly_style_sfunc,
9553 &setarmcmdlist, &showarmcmdlist);
9554
9555 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9556 _("Set usage of ARM 32-bit mode."),
9557 _("Show usage of ARM 32-bit mode."),
9558 _("When off, a 26-bit PC will be used."),
9559 NULL,
9560 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9561 mode is %s. */
9562 &setarmcmdlist, &showarmcmdlist);
9563
9564 /* Add a command to allow the user to force the FPU model. */
9565 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9566 _("Set the floating point type."),
9567 _("Show the floating point type."),
9568 _("auto - Determine the FP typefrom the OS-ABI.\n\
9569 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9570 fpa - FPA co-processor (GCC compiled).\n\
9571 softvfp - Software FP with pure-endian doubles.\n\
9572 vfp - VFP co-processor."),
9573 set_fp_model_sfunc, show_fp_model,
9574 &setarmcmdlist, &showarmcmdlist);
9575
9576 /* Add a command to allow the user to force the ABI. */
9577 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9578 _("Set the ABI."),
9579 _("Show the ABI."),
9580 NULL, arm_set_abi, arm_show_abi,
9581 &setarmcmdlist, &showarmcmdlist);
9582
9583 /* Add two commands to allow the user to force the assumed
9584 execution mode. */
9585 add_setshow_enum_cmd ("fallback-mode", class_support,
9586 arm_mode_strings, &arm_fallback_mode_string,
9587 _("Set the mode assumed when symbols are unavailable."),
9588 _("Show the mode assumed when symbols are unavailable."),
9589 NULL, NULL, arm_show_fallback_mode,
9590 &setarmcmdlist, &showarmcmdlist);
9591 add_setshow_enum_cmd ("force-mode", class_support,
9592 arm_mode_strings, &arm_force_mode_string,
9593 _("Set the mode assumed even when symbols are available."),
9594 _("Show the mode assumed even when symbols are available."),
9595 NULL, NULL, arm_show_force_mode,
9596 &setarmcmdlist, &showarmcmdlist);
9597
9598 /* Debugging flag. */
9599 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9600 _("Set ARM debugging."),
9601 _("Show ARM debugging."),
9602 _("When on, arm-specific debugging is enabled."),
9603 NULL,
9604 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9605 &setdebuglist, &showdebuglist);
9606
9607 #if GDB_SELF_TEST
9608 selftests::register_test ("arm-record", selftests::arm_record_test);
9609 #endif
9610
9611 }
9612
/* ARM-reversible process record data structures.  */

/* Sizes, in bytes, of the three instruction encodings the recorder
   has to deal with.  */
#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

/* Allocate a fresh uint32_t array of LENGTH entries into REGS and
   copy the register numbers collected in RECORD_BUF into it.  No-op
   when LENGTH is zero.  NOTE: LENGTH is evaluated twice — callers
   pass simple expressions.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
	do  \
	  { \
	    unsigned int reg_len = LENGTH; \
	    if (reg_len) \
	      { \
		REGS = XNEWVEC (uint32_t, reg_len); \
		memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
	      } \
	  } \
	while (0)

/* Allocate a fresh arm_mem_r array of LENGTH entries into MEMS and
   copy the (len, addr) pairs collected in RECORD_BUF into it.  No-op
   when LENGTH is zero.  NOTE(review): the memcpy reinterprets the
   uint32_t RECORD_BUF as struct arm_mem_r — this assumes arm_mem_r is
   exactly two packed uint32_t fields with no padding; confirm if the
   struct ever changes.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
	do  \
	  { \
	    unsigned int mem_len = LENGTH; \
	    if (mem_len) \
	      { \
		MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
		memcpy(&MEMS->len, &RECORD_BUF[0], \
		       sizeof(struct arm_mem_r) * LENGTH); \
	      } \
	  } \
	while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
	(0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9652
/* ARM memory record structure.  Describes one block of target memory
   that an instruction being recorded is about to overwrite.  */
struct arm_mem_r
{
  uint32_t len;    /* Record length.  */
  uint32_t addr;   /* Memory address.  */
};

/* ARM instruction record contains opcode of current insn
   and execution state (before entry to decode_insn()),
   contains list of to-be-modified registers and
   memory blocks (on return from decode_insn()).  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;
  struct regcache *regcache;
  CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
  uint32_t arm_insn;            /* Raw insn bits; should accommodate thumb.  */
  uint32_t cond;                /* Condition code.  */
  uint32_t opcode;              /* Insn opcode.  */
  uint32_t decode;              /* Insn decode bits.  */
  uint32_t mem_rec_count;       /* No of mem records in arm_mems.  */
  uint32_t reg_rec_count;       /* No of reg records in arm_regs.  */
  uint32_t *arm_regs;           /* Registers to be saved for this record.  */
  struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
} insn_decode_record;
9679
9680
/* Checks ARM SBZ and SBO mandatory fields.

   Check the LEN-bit field of INSN starting at 1-based bit position
   BIT_NUM against a "should be one" (SBO non-zero) or "should be
   zero" (SBO zero) architectural constraint.  Returns 1 when the
   field is acceptable (a zero LEN is trivially acceptable), 0
   otherwise.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  /* Extract the field; for an SBZ check it is inverted below so that
     in both cases the loop expects all-ones.  */
  uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));

  if (!len)
    return 1;

  if (!sbo)
    ones = ~ones;

  /* NOTE(review): SBO looks like a boolean flag but is used below as
     a bit mask.  When SBO == 0, "ones & sbo" is always 0 and the
     function returns 0 for any non-zero ONES; when SBO == 1 only the
     low bit is tested each iteration, and the loop stops at the
     highest set bit, so leading zero bits of the field go unchecked.
     Also "~ones" inverts all 32 bits rather than just LEN of them.
     Presumably the test was meant to be "!(ones & 1)" over a LEN-bit
     field — confirm against callers before relying on SBZ checks.  */
  while (ones)
    {
      if (!(ones & sbo))
	{
	  return 0;
	}
      ones = ones >> 1;
    }
  return 1;
}
9704
/* Result codes returned by the ARM process-record helpers.  */

enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Which flavor of misc store arm_record_strx is recording.  */

typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;

/* Instruction set an insn being recorded belongs to.  */

typedef enum
{
  ARM_RECORD=1,
  THUMB_RECORD,
  THUMB2_RECORD
} record_type_t;
9723
9724
9725 static int
9726 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9727 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9728 {
9729
9730 struct regcache *reg_cache = arm_insn_r->regcache;
9731 ULONGEST u_regval[2]= {0};
9732
9733 uint32_t reg_src1 = 0, reg_src2 = 0;
9734 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9735
9736 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9737 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9738
9739 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9740 {
9741 /* 1) Handle misc store, immediate offset. */
9742 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9743 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9744 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9745 regcache_raw_read_unsigned (reg_cache, reg_src1,
9746 &u_regval[0]);
9747 if (ARM_PC_REGNUM == reg_src1)
9748 {
9749 /* If R15 was used as Rn, hence current PC+8. */
9750 u_regval[0] = u_regval[0] + 8;
9751 }
9752 offset_8 = (immed_high << 4) | immed_low;
9753 /* Calculate target store address. */
9754 if (14 == arm_insn_r->opcode)
9755 {
9756 tgt_mem_addr = u_regval[0] + offset_8;
9757 }
9758 else
9759 {
9760 tgt_mem_addr = u_regval[0] - offset_8;
9761 }
9762 if (ARM_RECORD_STRH == str_type)
9763 {
9764 record_buf_mem[0] = 2;
9765 record_buf_mem[1] = tgt_mem_addr;
9766 arm_insn_r->mem_rec_count = 1;
9767 }
9768 else if (ARM_RECORD_STRD == str_type)
9769 {
9770 record_buf_mem[0] = 4;
9771 record_buf_mem[1] = tgt_mem_addr;
9772 record_buf_mem[2] = 4;
9773 record_buf_mem[3] = tgt_mem_addr + 4;
9774 arm_insn_r->mem_rec_count = 2;
9775 }
9776 }
9777 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9778 {
9779 /* 2) Store, register offset. */
9780 /* Get Rm. */
9781 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9782 /* Get Rn. */
9783 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9784 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9785 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9786 if (15 == reg_src2)
9787 {
9788 /* If R15 was used as Rn, hence current PC+8. */
9789 u_regval[0] = u_regval[0] + 8;
9790 }
9791 /* Calculate target store address, Rn +/- Rm, register offset. */
9792 if (12 == arm_insn_r->opcode)
9793 {
9794 tgt_mem_addr = u_regval[0] + u_regval[1];
9795 }
9796 else
9797 {
9798 tgt_mem_addr = u_regval[1] - u_regval[0];
9799 }
9800 if (ARM_RECORD_STRH == str_type)
9801 {
9802 record_buf_mem[0] = 2;
9803 record_buf_mem[1] = tgt_mem_addr;
9804 arm_insn_r->mem_rec_count = 1;
9805 }
9806 else if (ARM_RECORD_STRD == str_type)
9807 {
9808 record_buf_mem[0] = 4;
9809 record_buf_mem[1] = tgt_mem_addr;
9810 record_buf_mem[2] = 4;
9811 record_buf_mem[3] = tgt_mem_addr + 4;
9812 arm_insn_r->mem_rec_count = 2;
9813 }
9814 }
9815 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9816 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9817 {
9818 /* 3) Store, immediate pre-indexed. */
9819 /* 5) Store, immediate post-indexed. */
9820 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9821 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9822 offset_8 = (immed_high << 4) | immed_low;
9823 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9824 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9825 /* Calculate target store address, Rn +/- Rm, register offset. */
9826 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9827 {
9828 tgt_mem_addr = u_regval[0] + offset_8;
9829 }
9830 else
9831 {
9832 tgt_mem_addr = u_regval[0] - offset_8;
9833 }
9834 if (ARM_RECORD_STRH == str_type)
9835 {
9836 record_buf_mem[0] = 2;
9837 record_buf_mem[1] = tgt_mem_addr;
9838 arm_insn_r->mem_rec_count = 1;
9839 }
9840 else if (ARM_RECORD_STRD == str_type)
9841 {
9842 record_buf_mem[0] = 4;
9843 record_buf_mem[1] = tgt_mem_addr;
9844 record_buf_mem[2] = 4;
9845 record_buf_mem[3] = tgt_mem_addr + 4;
9846 arm_insn_r->mem_rec_count = 2;
9847 }
9848 /* Record Rn also as it changes. */
9849 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9850 arm_insn_r->reg_rec_count = 1;
9851 }
9852 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9853 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9854 {
9855 /* 4) Store, register pre-indexed. */
9856 /* 6) Store, register post -indexed. */
9857 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9858 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9859 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9860 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9861 /* Calculate target store address, Rn +/- Rm, register offset. */
9862 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9863 {
9864 tgt_mem_addr = u_regval[0] + u_regval[1];
9865 }
9866 else
9867 {
9868 tgt_mem_addr = u_regval[1] - u_regval[0];
9869 }
9870 if (ARM_RECORD_STRH == str_type)
9871 {
9872 record_buf_mem[0] = 2;
9873 record_buf_mem[1] = tgt_mem_addr;
9874 arm_insn_r->mem_rec_count = 1;
9875 }
9876 else if (ARM_RECORD_STRD == str_type)
9877 {
9878 record_buf_mem[0] = 4;
9879 record_buf_mem[1] = tgt_mem_addr;
9880 record_buf_mem[2] = 4;
9881 record_buf_mem[3] = tgt_mem_addr + 4;
9882 arm_insn_r->mem_rec_count = 2;
9883 }
9884 /* Record Rn also as it changes. */
9885 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9886 arm_insn_r->reg_rec_count = 1;
9887 }
9888 return 0;
9889 }
9890
9891 /* Handling ARM extension space insns. */
9892
9893 static int
9894 arm_record_extension_space (insn_decode_record *arm_insn_r)
9895 {
9896 int ret = 0; /* Return value: -1:record failure ; 0:success */
9897 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9898 uint32_t record_buf[8], record_buf_mem[8];
9899 uint32_t reg_src1 = 0;
9900 struct regcache *reg_cache = arm_insn_r->regcache;
9901 ULONGEST u_regval = 0;
9902
9903 gdb_assert (!INSN_RECORDED(arm_insn_r));
9904 /* Handle unconditional insn extension space. */
9905
9906 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9907 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9908 if (arm_insn_r->cond)
9909 {
9910 /* PLD has no affect on architectural state, it just affects
9911 the caches. */
9912 if (5 == ((opcode1 & 0xE0) >> 5))
9913 {
9914 /* BLX(1) */
9915 record_buf[0] = ARM_PS_REGNUM;
9916 record_buf[1] = ARM_LR_REGNUM;
9917 arm_insn_r->reg_rec_count = 2;
9918 }
9919 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9920 }
9921
9922
9923 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9924 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9925 {
9926 ret = -1;
9927 /* Undefined instruction on ARM V5; need to handle if later
9928 versions define it. */
9929 }
9930
9931 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9932 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9933 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9934
9935 /* Handle arithmetic insn extension space. */
9936 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9937 && !INSN_RECORDED(arm_insn_r))
9938 {
9939 /* Handle MLA(S) and MUL(S). */
9940 if (in_inclusive_range (insn_op1, 0U, 3U))
9941 {
9942 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9943 record_buf[1] = ARM_PS_REGNUM;
9944 arm_insn_r->reg_rec_count = 2;
9945 }
9946 else if (in_inclusive_range (insn_op1, 4U, 15U))
9947 {
9948 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9949 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9950 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9951 record_buf[2] = ARM_PS_REGNUM;
9952 arm_insn_r->reg_rec_count = 3;
9953 }
9954 }
9955
9956 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9957 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9958 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9959
9960 /* Handle control insn extension space. */
9961
9962 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9963 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9964 {
9965 if (!bit (arm_insn_r->arm_insn,25))
9966 {
9967 if (!bits (arm_insn_r->arm_insn, 4, 7))
9968 {
9969 if ((0 == insn_op1) || (2 == insn_op1))
9970 {
9971 /* MRS. */
9972 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9973 arm_insn_r->reg_rec_count = 1;
9974 }
9975 else if (1 == insn_op1)
9976 {
9977 /* CSPR is going to be changed. */
9978 record_buf[0] = ARM_PS_REGNUM;
9979 arm_insn_r->reg_rec_count = 1;
9980 }
9981 else if (3 == insn_op1)
9982 {
9983 /* SPSR is going to be changed. */
9984 /* We need to get SPSR value, which is yet to be done. */
9985 return -1;
9986 }
9987 }
9988 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
9989 {
9990 if (1 == insn_op1)
9991 {
9992 /* BX. */
9993 record_buf[0] = ARM_PS_REGNUM;
9994 arm_insn_r->reg_rec_count = 1;
9995 }
9996 else if (3 == insn_op1)
9997 {
9998 /* CLZ. */
9999 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10000 arm_insn_r->reg_rec_count = 1;
10001 }
10002 }
10003 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10004 {
10005 /* BLX. */
10006 record_buf[0] = ARM_PS_REGNUM;
10007 record_buf[1] = ARM_LR_REGNUM;
10008 arm_insn_r->reg_rec_count = 2;
10009 }
10010 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10011 {
10012 /* QADD, QSUB, QDADD, QDSUB */
10013 record_buf[0] = ARM_PS_REGNUM;
10014 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10015 arm_insn_r->reg_rec_count = 2;
10016 }
10017 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10018 {
10019 /* BKPT. */
10020 record_buf[0] = ARM_PS_REGNUM;
10021 record_buf[1] = ARM_LR_REGNUM;
10022 arm_insn_r->reg_rec_count = 2;
10023
10024 /* Save SPSR also;how? */
10025 return -1;
10026 }
10027 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10028 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10029 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10030 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10031 )
10032 {
10033 if (0 == insn_op1 || 1 == insn_op1)
10034 {
10035 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10036 /* We dont do optimization for SMULW<y> where we
10037 need only Rd. */
10038 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10039 record_buf[1] = ARM_PS_REGNUM;
10040 arm_insn_r->reg_rec_count = 2;
10041 }
10042 else if (2 == insn_op1)
10043 {
10044 /* SMLAL<x><y>. */
10045 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10046 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10047 arm_insn_r->reg_rec_count = 2;
10048 }
10049 else if (3 == insn_op1)
10050 {
10051 /* SMUL<x><y>. */
10052 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10053 arm_insn_r->reg_rec_count = 1;
10054 }
10055 }
10056 }
10057 else
10058 {
10059 /* MSR : immediate form. */
10060 if (1 == insn_op1)
10061 {
10062 /* CSPR is going to be changed. */
10063 record_buf[0] = ARM_PS_REGNUM;
10064 arm_insn_r->reg_rec_count = 1;
10065 }
10066 else if (3 == insn_op1)
10067 {
10068 /* SPSR is going to be changed. */
10069 /* we need to get SPSR value, which is yet to be done */
10070 return -1;
10071 }
10072 }
10073 }
10074
10075 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10076 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10077 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10078
10079 /* Handle load/store insn extension space. */
10080
10081 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10082 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10083 && !INSN_RECORDED(arm_insn_r))
10084 {
10085 /* SWP/SWPB. */
10086 if (0 == insn_op1)
10087 {
10088 /* These insn, changes register and memory as well. */
10089 /* SWP or SWPB insn. */
10090 /* Get memory address given by Rn. */
10091 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10092 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10093 /* SWP insn ?, swaps word. */
10094 if (8 == arm_insn_r->opcode)
10095 {
10096 record_buf_mem[0] = 4;
10097 }
10098 else
10099 {
10100 /* SWPB insn, swaps only byte. */
10101 record_buf_mem[0] = 1;
10102 }
10103 record_buf_mem[1] = u_regval;
10104 arm_insn_r->mem_rec_count = 1;
10105 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10106 arm_insn_r->reg_rec_count = 1;
10107 }
10108 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10109 {
10110 /* STRH. */
10111 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10112 ARM_RECORD_STRH);
10113 }
10114 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10115 {
10116 /* LDRD. */
10117 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10118 record_buf[1] = record_buf[0] + 1;
10119 arm_insn_r->reg_rec_count = 2;
10120 }
10121 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10122 {
10123 /* STRD. */
10124 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10125 ARM_RECORD_STRD);
10126 }
10127 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10128 {
10129 /* LDRH, LDRSB, LDRSH. */
10130 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10131 arm_insn_r->reg_rec_count = 1;
10132 }
10133
10134 }
10135
10136 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10137 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10138 && !INSN_RECORDED(arm_insn_r))
10139 {
10140 ret = -1;
10141 /* Handle coprocessor insn extension space. */
10142 }
10143
10144 /* To be done for ARMv5 and later; as of now we return -1. */
10145 if (-1 == ret)
10146 return ret;
10147
10148 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10149 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10150
10151 return ret;
10152 }
10153
10154 /* Handling opcode 000 insns. */
10155
/* Record handler for ARM instructions with top-level opcode 000:
   data-processing (register and register-shifted register),
   miscellaneous instructions, multiply/multiply-accumulate,
   synchronization primitives and the extra load/store encodings.
   Collects the registers and memory locations the instruction will
   modify into ARM_INSN_R so that reverse execution can undo it.
   Returns 0 on success, -1 for instructions that cannot be recorded
   yet (e.g. those that would require saving SPSR).  */

static int
arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
{
  struct regcache *reg_cache = arm_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];
  ULONGEST u_regval[2] = {0};

  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0;

  /* Cache the common decode fields: op (bits 21-24), op2 (bits 4-7)
     and the wider bits 20-24 field used to split the sub-classes.  */
  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
  opcode1 = bits (arm_insn_r->arm_insn, 20, 24);

  if (!((opcode1 & 0x19) == 0x10))
    {
      /* Data-processing (register) and Data-processing
	 (register-shifted register).  */
      /* Out of 11 shifter operands mode, all the insn modifies destination
	 register, which is specified by 13-16 decode.  The flags (CPSR)
	 may also change, so record both.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[1] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 2;
    }
  else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
    {
      /* Miscellaneous instructions */

      if (3 == arm_insn_r->decode && 0x12 == opcode1
	  && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
	{
	  /* Handle BLX, branch and link/exchange.  */
	  if (9 == arm_insn_r->opcode)
	    {
	      /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
		 and R14 stores the return address.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;
	    }
	}
      else if (7 == arm_insn_r->decode && 0x12 == opcode1)
	{
	  /* Handle enhanced software breakpoint insn, BKPT.  */
	  /* CPSR is changed to be executed in ARM state,  disabling normal
	     interrupts, entering abort mode.  */
	  /* According to high vector configuration PC is set.  */
	  /* user hit breakpoint and type reverse, in
	     that case, we need to go back with previous CPSR and
	     Program Counter.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  arm_insn_r->reg_rec_count = 2;

	  /* Save SPSR also; how?  Not supported yet, so fail.  */
	  return -1;
	}
      else if (1 == arm_insn_r->decode && 0x12 == opcode1
	       && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
	{
	  /* Handle BX, branch and link/exchange.  */
	  /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (1 == arm_insn_r->decode && 0x16 == opcode1
	       && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
	       && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
	{
	  /* Count leading zeros: CLZ.  Only Rd is written.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
	       && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
	       && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
	       && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
	{
	  /* Handle MRS insn: moves CPSR/SPSR into Rd.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
    }
  else if (9 == arm_insn_r->decode && opcode1 < 0x10)
    {
      /* Multiply and multiply-accumulate */

      /* Handle multiply instructions.  */
      /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
      if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
	{
	  /* Handle MLA and MUL: Rd (bits 16-19) and the flags change.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
	{
	  /* Handle SMLAL, SMULL, UMLAL, UMULL: the 64-bit result lives
	     in RdHi (bits 16-19) and RdLo (bits 12-15).  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[2] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 3;
	}
    }
  else if (9 == arm_insn_r->decode && opcode1 > 0x10)
    {
      /* Synchronization primitives */

      /* Handling SWP, SWPB.  */
      /* These insn, changes register and memory as well.  */
      /* SWP or SWPB insn.  */

      /* The memory address being swapped is given by Rn.  */
      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      /* SWP insn ?, swaps word.  */
      if (8 == arm_insn_r->opcode)
	{
	  record_buf_mem[0] = 4;
	}
      else
	{
	  /* SWPB insn, swaps only byte.  */
	  record_buf_mem[0] = 1;
	}
      record_buf_mem[1] = u_regval[0];
      arm_insn_r->mem_rec_count = 1;
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
	   || 15 == arm_insn_r->decode)
    {
      if ((opcode1 & 0x12) == 2)
	{
	  /* Extra load/store (unprivileged) -- not supported.  */
	  return -1;
	}
      else
	{
	  /* Extra load/store: halfword, signed byte/halfword and
	     doubleword forms, selected by op2 bits 5-6.  */
	  switch (bits (arm_insn_r->arm_insn, 5, 6))
	    {
	    case 1:
	      if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
		{
		  /* STRH (register), STRH (immediate) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRH);
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRH (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x5)
		{
		  /* LDRH (immediate), LDRH (literal) */
		  int rn = bits (arm_insn_r->arm_insn, 16, 19);

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (rn != 15)
		    {
		      /* LDRH (immediate); the literal form (Rn == PC)
			 never writes back.  */
		      if (bit (arm_insn_r->arm_insn, 21))
			{
			  /* Write back to Rn.  */
			  record_buf[arm_insn_r->reg_rec_count++] = rn;
			}
		    }
		}
	      else
		return -1;
	      break;
	    case 2:
	      if ((opcode1 & 0x05) == 0x0)
		{
		  /* LDRD (register): loads the even/odd register pair
		     Rt, Rt+1.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = record_buf[0] + 1;
		  arm_insn_r->reg_rec_count = 2;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRSB (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
		{
		  /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
		     LDRSB (literal) */
		  int rn = bits (arm_insn_r->arm_insn, 16, 19);

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (rn != 15)
		    {
		      /* LDRD (immediate), LDRSB (immediate); literal
			 forms (Rn == PC) never write back.  */
		      if (bit (arm_insn_r->arm_insn, 21))
			{
			  /* Write back to Rn.  */
			  record_buf[arm_insn_r->reg_rec_count++] = rn;
			}
		    }
		}
	      else
		return -1;
	      break;
	    case 3:
	      if ((opcode1 & 0x05) == 0x0)
		{
		  /* STRD (register) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRD);
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRSH (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x4)
		{
		  /* STRD (immediate) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRD);
		}
	      else if ((opcode1 & 0x05) == 0x5)
		{
		  /* LDRSH (immediate), LDRSH (literal) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else
		return -1;
	      break;
	    default:
	      return -1;
	    }
	}
    }
  else
    {
      return -1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
10446
10447 /* Handling opcode 001 insns. */
10448
10449 static int
10450 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10451 {
10452 uint32_t record_buf[8], record_buf_mem[8];
10453
10454 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10455 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10456
10457 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10458 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10459 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10460 )
10461 {
10462 /* Handle MSR insn. */
10463 if (9 == arm_insn_r->opcode)
10464 {
10465 /* CSPR is going to be changed. */
10466 record_buf[0] = ARM_PS_REGNUM;
10467 arm_insn_r->reg_rec_count = 1;
10468 }
10469 else
10470 {
10471 /* SPSR is going to be changed. */
10472 }
10473 }
10474 else if (arm_insn_r->opcode <= 15)
10475 {
10476 /* Normal data processing insns. */
10477 /* Out of 11 shifter operands mode, all the insn modifies destination
10478 register, which is specified by 13-16 decode. */
10479 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10480 record_buf[1] = ARM_PS_REGNUM;
10481 arm_insn_r->reg_rec_count = 2;
10482 }
10483 else
10484 {
10485 return -1;
10486 }
10487
10488 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10489 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10490 return 0;
10491 }
10492
10493 static int
10494 arm_record_media (insn_decode_record *arm_insn_r)
10495 {
10496 uint32_t record_buf[8];
10497
10498 switch (bits (arm_insn_r->arm_insn, 22, 24))
10499 {
10500 case 0:
10501 /* Parallel addition and subtraction, signed */
10502 case 1:
10503 /* Parallel addition and subtraction, unsigned */
10504 case 2:
10505 case 3:
10506 /* Packing, unpacking, saturation and reversal */
10507 {
10508 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10509
10510 record_buf[arm_insn_r->reg_rec_count++] = rd;
10511 }
10512 break;
10513
10514 case 4:
10515 case 5:
10516 /* Signed multiplies */
10517 {
10518 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10519 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10520
10521 record_buf[arm_insn_r->reg_rec_count++] = rd;
10522 if (op1 == 0x0)
10523 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10524 else if (op1 == 0x4)
10525 record_buf[arm_insn_r->reg_rec_count++]
10526 = bits (arm_insn_r->arm_insn, 12, 15);
10527 }
10528 break;
10529
10530 case 6:
10531 {
10532 if (bit (arm_insn_r->arm_insn, 21)
10533 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10534 {
10535 /* SBFX */
10536 record_buf[arm_insn_r->reg_rec_count++]
10537 = bits (arm_insn_r->arm_insn, 12, 15);
10538 }
10539 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10540 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10541 {
10542 /* USAD8 and USADA8 */
10543 record_buf[arm_insn_r->reg_rec_count++]
10544 = bits (arm_insn_r->arm_insn, 16, 19);
10545 }
10546 }
10547 break;
10548
10549 case 7:
10550 {
10551 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10552 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10553 {
10554 /* Permanently UNDEFINED */
10555 return -1;
10556 }
10557 else
10558 {
10559 /* BFC, BFI and UBFX */
10560 record_buf[arm_insn_r->reg_rec_count++]
10561 = bits (arm_insn_r->arm_insn, 12, 15);
10562 }
10563 }
10564 break;
10565
10566 default:
10567 return -1;
10568 }
10569
10570 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10571
10572 return 0;
10573 }
10574
10575 /* Handle ARM mode instructions with opcode 010. */
10576
10577 static int
10578 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10579 {
10580 struct regcache *reg_cache = arm_insn_r->regcache;
10581
10582 uint32_t reg_base , reg_dest;
10583 uint32_t offset_12, tgt_mem_addr;
10584 uint32_t record_buf[8], record_buf_mem[8];
10585 unsigned char wback;
10586 ULONGEST u_regval;
10587
10588 /* Calculate wback. */
10589 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10590 || (bit (arm_insn_r->arm_insn, 21) == 1);
10591
10592 arm_insn_r->reg_rec_count = 0;
10593 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10594
10595 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10596 {
10597 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10598 and LDRT. */
10599
10600 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10601 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10602
10603 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10604 preceeds a LDR instruction having R15 as reg_base, it
10605 emulates a branch and link instruction, and hence we need to save
10606 CPSR and PC as well. */
10607 if (ARM_PC_REGNUM == reg_dest)
10608 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10609
10610 /* If wback is true, also save the base register, which is going to be
10611 written to. */
10612 if (wback)
10613 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10614 }
10615 else
10616 {
10617 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10618
10619 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10620 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10621
10622 /* Handle bit U. */
10623 if (bit (arm_insn_r->arm_insn, 23))
10624 {
10625 /* U == 1: Add the offset. */
10626 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10627 }
10628 else
10629 {
10630 /* U == 0: subtract the offset. */
10631 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10632 }
10633
10634 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10635 bytes. */
10636 if (bit (arm_insn_r->arm_insn, 22))
10637 {
10638 /* STRB and STRBT: 1 byte. */
10639 record_buf_mem[0] = 1;
10640 }
10641 else
10642 {
10643 /* STR and STRT: 4 bytes. */
10644 record_buf_mem[0] = 4;
10645 }
10646
10647 /* Handle bit P. */
10648 if (bit (arm_insn_r->arm_insn, 24))
10649 record_buf_mem[1] = tgt_mem_addr;
10650 else
10651 record_buf_mem[1] = (uint32_t) u_regval;
10652
10653 arm_insn_r->mem_rec_count = 1;
10654
10655 /* If wback is true, also save the base register, which is going to be
10656 written to. */
10657 if (wback)
10658 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10659 }
10660
10661 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10662 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10663 return 0;
10664 }
10665
10666 /* Handling opcode 011 insns. */
10667
10668 static int
10669 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10670 {
10671 struct regcache *reg_cache = arm_insn_r->regcache;
10672
10673 uint32_t shift_imm = 0;
10674 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10675 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10676 uint32_t record_buf[8], record_buf_mem[8];
10677
10678 LONGEST s_word;
10679 ULONGEST u_regval[2];
10680
10681 if (bit (arm_insn_r->arm_insn, 4))
10682 return arm_record_media (arm_insn_r);
10683
10684 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10685 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10686
10687 /* Handle enhanced store insns and LDRD DSP insn,
10688 order begins according to addressing modes for store insns
10689 STRH insn. */
10690
10691 /* LDR or STR? */
10692 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10693 {
10694 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10695 /* LDR insn has a capability to do branching, if
10696 MOV LR, PC is preceded by LDR insn having Rn as R15
10697 in that case, it emulates branch and link insn, and hence we
10698 need to save CSPR and PC as well. */
10699 if (15 != reg_dest)
10700 {
10701 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10702 arm_insn_r->reg_rec_count = 1;
10703 }
10704 else
10705 {
10706 record_buf[0] = reg_dest;
10707 record_buf[1] = ARM_PS_REGNUM;
10708 arm_insn_r->reg_rec_count = 2;
10709 }
10710 }
10711 else
10712 {
10713 if (! bits (arm_insn_r->arm_insn, 4, 11))
10714 {
10715 /* Store insn, register offset and register pre-indexed,
10716 register post-indexed. */
10717 /* Get Rm. */
10718 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10719 /* Get Rn. */
10720 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10721 regcache_raw_read_unsigned (reg_cache, reg_src1
10722 , &u_regval[0]);
10723 regcache_raw_read_unsigned (reg_cache, reg_src2
10724 , &u_regval[1]);
10725 if (15 == reg_src2)
10726 {
10727 /* If R15 was used as Rn, hence current PC+8. */
10728 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10729 u_regval[0] = u_regval[0] + 8;
10730 }
10731 /* Calculate target store address, Rn +/- Rm, register offset. */
10732 /* U == 1. */
10733 if (bit (arm_insn_r->arm_insn, 23))
10734 {
10735 tgt_mem_addr = u_regval[0] + u_regval[1];
10736 }
10737 else
10738 {
10739 tgt_mem_addr = u_regval[1] - u_regval[0];
10740 }
10741
10742 switch (arm_insn_r->opcode)
10743 {
10744 /* STR. */
10745 case 8:
10746 case 12:
10747 /* STR. */
10748 case 9:
10749 case 13:
10750 /* STRT. */
10751 case 1:
10752 case 5:
10753 /* STR. */
10754 case 0:
10755 case 4:
10756 record_buf_mem[0] = 4;
10757 break;
10758
10759 /* STRB. */
10760 case 10:
10761 case 14:
10762 /* STRB. */
10763 case 11:
10764 case 15:
10765 /* STRBT. */
10766 case 3:
10767 case 7:
10768 /* STRB. */
10769 case 2:
10770 case 6:
10771 record_buf_mem[0] = 1;
10772 break;
10773
10774 default:
10775 gdb_assert_not_reached ("no decoding pattern found");
10776 break;
10777 }
10778 record_buf_mem[1] = tgt_mem_addr;
10779 arm_insn_r->mem_rec_count = 1;
10780
10781 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10782 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10783 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10784 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10785 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10786 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10787 )
10788 {
10789 /* Rn is going to be changed in pre-indexed mode and
10790 post-indexed mode as well. */
10791 record_buf[0] = reg_src2;
10792 arm_insn_r->reg_rec_count = 1;
10793 }
10794 }
10795 else
10796 {
10797 /* Store insn, scaled register offset; scaled pre-indexed. */
10798 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10799 /* Get Rm. */
10800 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10801 /* Get Rn. */
10802 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10803 /* Get shift_imm. */
10804 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10805 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10806 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10807 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10808 /* Offset_12 used as shift. */
10809 switch (offset_12)
10810 {
10811 case 0:
10812 /* Offset_12 used as index. */
10813 offset_12 = u_regval[0] << shift_imm;
10814 break;
10815
10816 case 1:
10817 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10818 break;
10819
10820 case 2:
10821 if (!shift_imm)
10822 {
10823 if (bit (u_regval[0], 31))
10824 {
10825 offset_12 = 0xFFFFFFFF;
10826 }
10827 else
10828 {
10829 offset_12 = 0;
10830 }
10831 }
10832 else
10833 {
10834 /* This is arithmetic shift. */
10835 offset_12 = s_word >> shift_imm;
10836 }
10837 break;
10838
10839 case 3:
10840 if (!shift_imm)
10841 {
10842 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10843 &u_regval[1]);
10844 /* Get C flag value and shift it by 31. */
10845 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10846 | (u_regval[0]) >> 1);
10847 }
10848 else
10849 {
10850 offset_12 = (u_regval[0] >> shift_imm) \
10851 | (u_regval[0] <<
10852 (sizeof(uint32_t) - shift_imm));
10853 }
10854 break;
10855
10856 default:
10857 gdb_assert_not_reached ("no decoding pattern found");
10858 break;
10859 }
10860
10861 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10862 /* bit U set. */
10863 if (bit (arm_insn_r->arm_insn, 23))
10864 {
10865 tgt_mem_addr = u_regval[1] + offset_12;
10866 }
10867 else
10868 {
10869 tgt_mem_addr = u_regval[1] - offset_12;
10870 }
10871
10872 switch (arm_insn_r->opcode)
10873 {
10874 /* STR. */
10875 case 8:
10876 case 12:
10877 /* STR. */
10878 case 9:
10879 case 13:
10880 /* STRT. */
10881 case 1:
10882 case 5:
10883 /* STR. */
10884 case 0:
10885 case 4:
10886 record_buf_mem[0] = 4;
10887 break;
10888
10889 /* STRB. */
10890 case 10:
10891 case 14:
10892 /* STRB. */
10893 case 11:
10894 case 15:
10895 /* STRBT. */
10896 case 3:
10897 case 7:
10898 /* STRB. */
10899 case 2:
10900 case 6:
10901 record_buf_mem[0] = 1;
10902 break;
10903
10904 default:
10905 gdb_assert_not_reached ("no decoding pattern found");
10906 break;
10907 }
10908 record_buf_mem[1] = tgt_mem_addr;
10909 arm_insn_r->mem_rec_count = 1;
10910
10911 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10912 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10913 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10914 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10915 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10916 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10917 )
10918 {
10919 /* Rn is going to be changed in register scaled pre-indexed
10920 mode,and scaled post indexed mode. */
10921 record_buf[0] = reg_src2;
10922 arm_insn_r->reg_rec_count = 1;
10923 }
10924 }
10925 }
10926
10927 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10928 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10929 return 0;
10930 }
10931
10932 /* Handle ARM mode instructions with opcode 100. */
10933
10934 static int
10935 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10936 {
10937 struct regcache *reg_cache = arm_insn_r->regcache;
10938 uint32_t register_count = 0, register_bits;
10939 uint32_t reg_base, addr_mode;
10940 uint32_t record_buf[24], record_buf_mem[48];
10941 uint32_t wback;
10942 ULONGEST u_regval;
10943
10944 /* Fetch the list of registers. */
10945 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10946 arm_insn_r->reg_rec_count = 0;
10947
10948 /* Fetch the base register that contains the address we are loading data
10949 to. */
10950 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10951
10952 /* Calculate wback. */
10953 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10954
10955 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10956 {
10957 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10958
10959 /* Find out which registers are going to be loaded from memory. */
10960 while (register_bits)
10961 {
10962 if (register_bits & 0x00000001)
10963 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10964 register_bits = register_bits >> 1;
10965 register_count++;
10966 }
10967
10968
10969 /* If wback is true, also save the base register, which is going to be
10970 written to. */
10971 if (wback)
10972 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10973
10974 /* Save the CPSR register. */
10975 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10976 }
10977 else
10978 {
10979 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10980
10981 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10982
10983 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10984
10985 /* Find out how many registers are going to be stored to memory. */
10986 while (register_bits)
10987 {
10988 if (register_bits & 0x00000001)
10989 register_count++;
10990 register_bits = register_bits >> 1;
10991 }
10992
10993 switch (addr_mode)
10994 {
10995 /* STMDA (STMED): Decrement after. */
10996 case 0:
10997 record_buf_mem[1] = (uint32_t) u_regval
10998 - register_count * ARM_INT_REGISTER_SIZE + 4;
10999 break;
11000 /* STM (STMIA, STMEA): Increment after. */
11001 case 1:
11002 record_buf_mem[1] = (uint32_t) u_regval;
11003 break;
11004 /* STMDB (STMFD): Decrement before. */
11005 case 2:
11006 record_buf_mem[1] = (uint32_t) u_regval
11007 - register_count * ARM_INT_REGISTER_SIZE;
11008 break;
11009 /* STMIB (STMFA): Increment before. */
11010 case 3:
11011 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11012 break;
11013 default:
11014 gdb_assert_not_reached ("no decoding pattern found");
11015 break;
11016 }
11017
11018 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11019 arm_insn_r->mem_rec_count = 1;
11020
11021 /* If wback is true, also save the base register, which is going to be
11022 written to. */
11023 if (wback)
11024 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11025 }
11026
11027 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11028 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11029 return 0;
11030 }
11031
11032 /* Handling opcode 101 insns. */
11033
11034 static int
11035 arm_record_b_bl (insn_decode_record *arm_insn_r)
11036 {
11037 uint32_t record_buf[8];
11038
11039 /* Handle B, BL, BLX(1) insns. */
11040 /* B simply branches so we do nothing here. */
11041 /* Note: BLX(1) doesnt fall here but instead it falls into
11042 extension space. */
11043 if (bit (arm_insn_r->arm_insn, 24))
11044 {
11045 record_buf[0] = ARM_LR_REGNUM;
11046 arm_insn_r->reg_rec_count = 1;
11047 }
11048
11049 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11050
11051 return 0;
11052 }
11053
11054 static int
11055 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11056 {
11057 printf_unfiltered (_("Process record does not support instruction "
11058 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11059 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11060
11061 return -1;
11062 }
11063
11064 /* Record handler for vector data transfer instructions. */
11065
11066 static int
11067 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11068 {
11069 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11070 uint32_t record_buf[4];
11071
11072 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11073 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11074 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11075 bit_l = bit (arm_insn_r->arm_insn, 20);
11076 bit_c = bit (arm_insn_r->arm_insn, 8);
11077
11078 /* Handle VMOV instruction. */
11079 if (bit_l && bit_c)
11080 {
11081 record_buf[0] = reg_t;
11082 arm_insn_r->reg_rec_count = 1;
11083 }
11084 else if (bit_l && !bit_c)
11085 {
11086 /* Handle VMOV instruction. */
11087 if (bits_a == 0x00)
11088 {
11089 record_buf[0] = reg_t;
11090 arm_insn_r->reg_rec_count = 1;
11091 }
11092 /* Handle VMRS instruction. */
11093 else if (bits_a == 0x07)
11094 {
11095 if (reg_t == 15)
11096 reg_t = ARM_PS_REGNUM;
11097
11098 record_buf[0] = reg_t;
11099 arm_insn_r->reg_rec_count = 1;
11100 }
11101 }
11102 else if (!bit_l && !bit_c)
11103 {
11104 /* Handle VMOV instruction. */
11105 if (bits_a == 0x00)
11106 {
11107 record_buf[0] = ARM_D0_REGNUM + reg_v;
11108
11109 arm_insn_r->reg_rec_count = 1;
11110 }
11111 /* Handle VMSR instruction. */
11112 else if (bits_a == 0x07)
11113 {
11114 record_buf[0] = ARM_FPSCR_REGNUM;
11115 arm_insn_r->reg_rec_count = 1;
11116 }
11117 }
11118 else if (!bit_l && bit_c)
11119 {
11120 /* Handle VMOV instruction. */
11121 if (!(bits_a & 0x04))
11122 {
11123 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11124 + ARM_D0_REGNUM;
11125 arm_insn_r->reg_rec_count = 1;
11126 }
11127 /* Handle VDUP instruction. */
11128 else
11129 {
11130 if (bit (arm_insn_r->arm_insn, 21))
11131 {
11132 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11133 record_buf[0] = reg_v + ARM_D0_REGNUM;
11134 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11135 arm_insn_r->reg_rec_count = 2;
11136 }
11137 else
11138 {
11139 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11140 record_buf[0] = reg_v + ARM_D0_REGNUM;
11141 arm_insn_r->reg_rec_count = 1;
11142 }
11143 }
11144 }
11145
11146 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11147 return 0;
11148 }
11149
/* Record handler for extension register load/store instructions.

   Decodes the VFP/NEON "extension register load/store" group (VMOV
   between core and extension registers, VSTM/VPUSH, VLDM/VPOP, VSTR
   and VLDR) and records, for process record, the registers and the
   memory ranges that are about to be overwritten.  Always returns 0
   (success).  */

static int
arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
{
  uint32_t opcode, single_reg;
  uint8_t op_vldm_vstm;
  uint32_t record_buf[8], record_buf_mem[128];
  ULONGEST u_regval = 0;

  struct regcache *reg_cache = arm_insn_r->regcache;

  /* Bits 20-24 select the operation; bit 8 distinguishes single (S)
     from double (D) precision forms.  */
  opcode = bits (arm_insn_r->arm_insn, 20, 24);
  single_reg = !bit (arm_insn_r->arm_insn, 8);
  op_vldm_vstm = opcode & 0x1b;

  /* Handle VMOV instructions.  */
  if ((opcode & 0x1e) == 0x04)
    {
      if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20?  */
	{
	  /* Transfer to two ARM core registers: Rt and Rt2 change.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
	  arm_insn_r->reg_rec_count = 2;
	}
      else
	{
	  uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
	  uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);

	  if (single_reg)
	    {
	      /* The first S register number m is REG_M:M (M is bit 5),
		 the corresponding D register number is REG_M:M / 2, which
		 is REG_M.  */
	      record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
	      /* The second S register number is REG_M:M + 1, the
		 corresponding D register number is (REG_M:M + 1) / 2.
		 IOW, if bit M is 1, the first and second S registers
		 are mapped to different D registers, otherwise, they are
		 in the same D register.  */
	      if (bit_m)
		{
		  record_buf[arm_insn_r->reg_rec_count++]
		    = ARM_D0_REGNUM + reg_m + 1;
		}
	    }
	  else
	    {
	      /* Double precision: the destination is D register M:REG_M.  */
	      record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
	      arm_insn_r->reg_rec_count = 1;
	    }
	}
    }
  /* Handle VSTM and VPUSH instructions.  */
  else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
	   || op_vldm_vstm == 0x12)
    {
      uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
      uint32_t memory_index = 0;

      reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
      /* IMM8 is the number of 32-bit words transferred.  */
      imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
      imm_off32 = imm_off8 << 2;
      memory_count = imm_off8;

      /* Bit 23 (U): increment (store upwards from Rn) versus
	 decrement-before (VPUSH: first store at Rn - imm32).  */
      if (bit (arm_insn_r->arm_insn, 23))
	start_address = u_regval;
      else
	start_address = u_regval - imm_off32;

      /* Bit 21 (W): base register write-back modifies Rn too.  */
      if (bit (arm_insn_r->arm_insn, 21))
	{
	  record_buf[0] = reg_rn;
	  arm_insn_r->reg_rec_count = 1;
	}

      /* Record the destination memory, one register slot per
	 iteration.  NOTE(review): in the double-precision branch each
	 iteration covers 8 bytes while MEMORY_COUNT counts 32-bit
	 words (IMM8), so this appears to record twice the transferred
	 range; for a large/malformed IMM8 the writes could also step
	 past the 128 entries of RECORD_BUF_MEM -- verify against the
	 ARM ARM VSTM encoding.  */
      while (memory_count > 0)
	{
	  if (single_reg)
	    {
	      record_buf_mem[memory_index] = 4;
	      record_buf_mem[memory_index + 1] = start_address;
	      start_address = start_address + 4;
	      memory_index = memory_index + 2;
	    }
	  else
	    {
	      record_buf_mem[memory_index] = 4;
	      record_buf_mem[memory_index + 1] = start_address;
	      record_buf_mem[memory_index + 2] = 4;
	      record_buf_mem[memory_index + 3] = start_address + 4;
	      start_address = start_address + 8;
	      memory_index = memory_index + 4;
	    }
	  memory_count--;
	}
      arm_insn_r->mem_rec_count = (memory_index >> 1);
    }
  /* Handle VLDM instructions.  */
  else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
	   || op_vldm_vstm == 0x13)
    {
      uint32_t reg_count, reg_vd;
      uint32_t reg_index = 0;
      uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);

      reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
      reg_count = bits (arm_insn_r->arm_insn, 0, 7);

      /* REG_VD is the first D register number.  If the instruction
	 loads memory to S registers (SINGLE_REG is TRUE), the register
	 number is (REG_VD << 1 | bit D), so the corresponding D
	 register number is (REG_VD << 1 | bit D) / 2 = REG_VD.  */
      if (!single_reg)
	reg_vd = reg_vd | (bit_d << 4);

      /* Bit 21 (W): write-back also clobbers the base register Rn.  */
      if (bit (arm_insn_r->arm_insn, 21) /* write back */)
	record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);

      /* If the instruction loads memory to D register, REG_COUNT should
	 be divided by 2, according to the ARM Architecture Reference
	 Manual.  If the instruction loads memory to S register, divide by
	 2 as well because two S registers are mapped to D register.  */
      reg_count = reg_count / 2;
      if (single_reg && bit_d)
	{
	  /* Increase the register count if S register list starts from
	     an odd number (bit d is one).  */
	  reg_count++;
	}

      /* Record the destination D registers, highest number first.  */
      while (reg_count > 0)
	{
	  record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
	  reg_count--;
	}
      arm_insn_r->reg_rec_count = reg_index;
    }
  /* VSTR Vector store register.  */
  else if ((opcode & 0x13) == 0x10)
    {
      uint32_t start_address, reg_rn, imm_off32, imm_off8;
      uint32_t memory_index = 0;

      reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
      imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
      imm_off32 = imm_off8 << 2;

      /* Bit 23 (U) selects Rn + imm32 versus Rn - imm32.  */
      if (bit (arm_insn_r->arm_insn, 23))
	start_address = u_regval + imm_off32;
      else
	start_address = u_regval - imm_off32;

      if (single_reg)
	{
	  /* One 32-bit word is overwritten.  */
	  record_buf_mem[memory_index] = 4;
	  record_buf_mem[memory_index + 1] = start_address;
	  arm_insn_r->mem_rec_count = 1;
	}
      else
	{
	  /* One 64-bit D register: two consecutive words.  */
	  record_buf_mem[memory_index] = 4;
	  record_buf_mem[memory_index + 1] = start_address;
	  record_buf_mem[memory_index + 2] = 4;
	  record_buf_mem[memory_index + 3] = start_address + 4;
	  arm_insn_r->mem_rec_count = 2;
	}
    }
  /* VLDR Vector load register.  */
  else if ((opcode & 0x13) == 0x11)
    {
      uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);

      if (!single_reg)
	{
	  /* Destination is the D register D:Vd.  */
	  reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
	  record_buf[0] = ARM_D0_REGNUM + reg_vd;
	}
      else
	{
	  /* Destination is the S register Vd:D; it lives in D
	     register (Vd:D) / 2.  */
	  reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
	  /* Record register D rather than pseudo register S.  */
	  record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
	}
      arm_insn_r->reg_rec_count = 1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
11344
/* Record handler for arm/thumb mode VFP data processing instructions.

   Only the destination of the instruction needs recording; the decode
   below classifies each encoding into one of:

     INSN_T0 - destination is the D register pair D:Vd and D:Vd + 1;
     INSN_T1 - destination is the single D register D:Vd;
     INSN_T2 - destination is the S register Vd:D;
     INSN_T3 - only the FPSCR changes (VCMP/VCMPE).

   Returns 0 on success.  */

static int
arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
{
  uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
  uint32_t record_buf[4];
  enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
  enum insn_types curr_insn_type = INSN_INV;

  reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
  opc1 = bits (arm_insn_r->arm_insn, 20, 23);
  opc2 = bits (arm_insn_r->arm_insn, 16, 19);
  opc3 = bits (arm_insn_r->arm_insn, 6, 7);
  /* DP_OP_SZ (bit 8, "sz") selects double (1) vs single (0)
     precision for most encodings below.  */
  dp_op_sz = bit (arm_insn_r->arm_insn, 8);
  bit_d = bit (arm_insn_r->arm_insn, 22);
  /* Mask off the "D" bit.  */
  opc1 = opc1 & ~0x04;

  /* Handle VMLA, VMLS.  */
  if (opc1 == 0x00)
    {
      if (bit (arm_insn_r->arm_insn, 10))
	{
	  /* Advanced SIMD form: bit 6 (Q) selects quadword (pair of
	     D registers) versus doubleword destination.  */
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VNMLA, VNMLS, VNMUL.  */
  else if (opc1 == 0x01)
    {
      if (dp_op_sz)
	curr_insn_type = INSN_T1;
      else
	curr_insn_type = INSN_T2;
    }
  /* Handle VMUL.  */
  else if (opc1 == 0x02 && !(opc3 & 0x01))
    {
      if (bit (arm_insn_r->arm_insn, 10))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VADD, VSUB.  */
  else if (opc1 == 0x03)
    {
      if (!bit (arm_insn_r->arm_insn, 9))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VDIV.  */
  else if (opc1 == 0x08)
    {
      if (dp_op_sz)
	curr_insn_type = INSN_T1;
      else
	curr_insn_type = INSN_T2;
    }
  /* Handle all other vfp data processing instructions.  */
  else if (opc1 == 0x0b)
    {
      /* Handle VMOV.  */
      if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
	{
	  if (bit (arm_insn_r->arm_insn, 4))
	    {
	      if (bit (arm_insn_r->arm_insn, 6))
		curr_insn_type = INSN_T0;
	      else
		curr_insn_type = INSN_T1;
	    }
	  else
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	}
      /* Handle VNEG and VABS.  */
      else if ((opc2 == 0x01 && opc3 == 0x01)
	       || (opc2 == 0x00 && opc3 == 0x03))
	{
	  if (!bit (arm_insn_r->arm_insn, 11))
	    {
	      if (bit (arm_insn_r->arm_insn, 6))
		curr_insn_type = INSN_T0;
	      else
		curr_insn_type = INSN_T1;
	    }
	  else
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	}
      /* Handle VSQRT.  */
      else if (opc2 == 0x01 && opc3 == 0x03)
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
      /* Handle VCVT.  */
      else if (opc2 == 0x07 && opc3 == 0x03)
	{
	  /* Double <-> single conversion: the destination has the
	     OPPOSITE precision of the source, hence the inverted
	     test on sz.  */
	  if (!dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
      else if (opc3 & 0x01)
	{
	  /* Handle VCVT.  */
	  if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
	    {
	      /* Integer <-> floating-point; bit 18 (op/to_integer)
		 decides whether the destination is an S register.  */
	      if (!bit (arm_insn_r->arm_insn, 18))
		curr_insn_type = INSN_T2;
	      else
		{
		  if (dp_op_sz)
		    curr_insn_type = INSN_T1;
		  else
		    curr_insn_type = INSN_T2;
		}
	    }
	  /* Handle VCVT.  */
	  else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	  /* Handle VCVTB, VCVTT.  */
	  else if ((opc2 & 0x0e) == 0x02)
	    curr_insn_type = INSN_T2;
	  /* Handle VCMP, VCMPE.  */
	  else if ((opc2 & 0x0e) == 0x04)
	    curr_insn_type = INSN_T3;
	}
    }

  switch (curr_insn_type)
    {
    case INSN_T0:
      reg_vd = reg_vd | (bit_d << 4);
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
      arm_insn_r->reg_rec_count = 2;
      break;

    case INSN_T1:
      reg_vd = reg_vd | (bit_d << 4);
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    case INSN_T2:
      /* NOTE(review): this records ARM_D0_REGNUM + (Vd:D), while the
	 VLDR case in arm_record_exreg_ld_st_insn records the D
	 register containing the S register, i.e. (Vd:D) / 2 -- the
	 two look inconsistent; verify which mapping is intended.  */
      reg_vd = (reg_vd << 1) | bit_d;
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    case INSN_T3:
      record_buf[0] = ARM_FPSCR_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    default:
      /* NOTE(review): any unallocated encoding routed here would abort
	 GDB via the assertion; returning an error may be safer.  */
      gdb_assert_not_reached ("no decoding pattern found");
      break;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  return 0;
}
11556
11557 /* Handling opcode 110 insns. */
11558
11559 static int
11560 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11561 {
11562 uint32_t op1, op1_ebit, coproc;
11563
11564 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11565 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11566 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11567
11568 if ((coproc & 0x0e) == 0x0a)
11569 {
11570 /* Handle extension register ld/st instructions. */
11571 if (!(op1 & 0x20))
11572 return arm_record_exreg_ld_st_insn (arm_insn_r);
11573
11574 /* 64-bit transfers between arm core and extension registers. */
11575 if ((op1 & 0x3e) == 0x04)
11576 return arm_record_exreg_ld_st_insn (arm_insn_r);
11577 }
11578 else
11579 {
11580 /* Handle coprocessor ld/st instructions. */
11581 if (!(op1 & 0x3a))
11582 {
11583 /* Store. */
11584 if (!op1_ebit)
11585 return arm_record_unsupported_insn (arm_insn_r);
11586 else
11587 /* Load. */
11588 return arm_record_unsupported_insn (arm_insn_r);
11589 }
11590
11591 /* Move to coprocessor from two arm core registers. */
11592 if (op1 == 0x4)
11593 return arm_record_unsupported_insn (arm_insn_r);
11594
11595 /* Move to two arm core registers from coprocessor. */
11596 if (op1 == 0x5)
11597 {
11598 uint32_t reg_t[2];
11599
11600 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11601 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11602 arm_insn_r->reg_rec_count = 2;
11603
11604 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11605 return 0;
11606 }
11607 }
11608 return arm_record_unsupported_insn (arm_insn_r);
11609 }
11610
/* Handling opcode 111 insns.

   Covers SVC/SWI system calls, coprocessor register transfers
   (MRC/MCR), VFP data processing, and the remaining coprocessor
   encodings.  Returns 0 on success, -1 when the instruction cannot
   be recorded.  */

static int
arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
{
  uint32_t op, op1_ebit, coproc, bits_24_25;
  struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
  struct regcache *reg_cache = arm_insn_r->regcache;

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
  coproc = bits (arm_insn_r->arm_insn, 8, 11);
  op1_ebit = bit (arm_insn_r->arm_insn, 20);
  op = bit (arm_insn_r->arm_insn, 4);
  bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);

  /* Handle arm SWI/SVC system call instructions.  */
  if (bits_24_25 == 0x3)
    {
      if (tdep->arm_syscall_record != NULL)
	{
	  ULONGEST svc_operand, svc_number;

	  /* The 24-bit immediate of the SVC instruction.  */
	  svc_operand = (0x00ffffff & arm_insn_r->arm_insn);

	  if (svc_operand)  /* OABI.  */
	    svc_number = svc_operand - 0x900000;
	  else /* EABI.  */
	    /* Under EABI the syscall number is passed in r7.  */
	    regcache_raw_read_unsigned (reg_cache, 7, &svc_number);

	  return tdep->arm_syscall_record (reg_cache, svc_number);
	}
      else
	{
	  printf_unfiltered (_("no syscall record support\n"));
	  return -1;
	}
    }
  else if (bits_24_25 == 0x02)
    {
      if (op)
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* 8, 16, and 32-bit transfer */
	      return arm_record_vdata_transfer_insn (arm_insn_r);
	    }
	  else
	    {
	      if (op1_ebit)
		{
		  /* MRC, MRC2 */
		  uint32_t record_buf[1];

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  /* Rt == 15 means "transfer flags to CPSR", so record
		     the status register instead of the PC.  */
		  if (record_buf[0] == 15)
		    record_buf[0] = ARM_PS_REGNUM;

		  arm_insn_r->reg_rec_count = 1;
		  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
			     record_buf);
		  return 0;
		}
	      else
		{
		  /* MCR, MCR2 */
		  return -1;
		}
	    }
	}
      else
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* VFP data-processing instructions.  */
	      return arm_record_vfp_data_proc_insn (arm_insn_r);
	    }
	  else
	    {
	      /* CDP, CDP2 */
	      return -1;
	    }
	}
    }
  else
    {
      unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);

      /* NOTE(review): every path below returns -1 -- none of these
	 encodings are supported by process record yet.  */
      if (op1 == 5)
	{
	  if ((coproc & 0x0e) != 0x0a)
	    {
	      /* MRRC, MRRC2 */
	      return -1;
	    }
	  /* OP1 == 5 with coprocessor 10/11 (64-bit transfer) falls
	     through to the final return -1 below.  */
	}
      else if (op1 == 4 || op1 == 5)
	{
	  /* NOTE(review): OP1 == 5 can never reach this branch; it was
	     consumed by the test above, so this is effectively
	     OP1 == 4 only.  */
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* 64-bit transfers between ARM core and extension */
	      return -1;
	    }
	  else if (op1 == 4)
	    {
	      /* MCRR, MCRR2 */
	      return -1;
	    }
	}
      else if (op1 == 0 || op1 == 1)
	{
	  /* UNDEFINED */
	  return -1;
	}
      else
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* Extension register load/store */
	    }
	  else
	    {
	      /* STC, STC2, LDC, LDC2 */
	    }
	  return -1;
	}
    }

  return -1;
}
11740
11741 /* Handling opcode 000 insns. */
11742
11743 static int
11744 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11745 {
11746 uint32_t record_buf[8];
11747 uint32_t reg_src1 = 0;
11748
11749 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11750
11751 record_buf[0] = ARM_PS_REGNUM;
11752 record_buf[1] = reg_src1;
11753 thumb_insn_r->reg_rec_count = 2;
11754
11755 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11756
11757 return 0;
11758 }
11759
11760
11761 /* Handling opcode 001 insns. */
11762
11763 static int
11764 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11765 {
11766 uint32_t record_buf[8];
11767 uint32_t reg_src1 = 0;
11768
11769 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11770
11771 record_buf[0] = ARM_PS_REGNUM;
11772 record_buf[1] = reg_src1;
11773 thumb_insn_r->reg_rec_count = 2;
11774
11775 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11776
11777 return 0;
11778 }
11779
11780 /* Handling opcode 010 insns. */
11781
11782 static int
11783 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11784 {
11785 struct regcache *reg_cache = thumb_insn_r->regcache;
11786 uint32_t record_buf[8], record_buf_mem[8];
11787
11788 uint32_t reg_src1 = 0, reg_src2 = 0;
11789 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11790
11791 ULONGEST u_regval[2] = {0};
11792
11793 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11794
11795 if (bit (thumb_insn_r->arm_insn, 12))
11796 {
11797 /* Handle load/store register offset. */
11798 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11799
11800 if (in_inclusive_range (opB, 4U, 7U))
11801 {
11802 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11803 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11804 record_buf[0] = reg_src1;
11805 thumb_insn_r->reg_rec_count = 1;
11806 }
11807 else if (in_inclusive_range (opB, 0U, 2U))
11808 {
11809 /* STR(2), STRB(2), STRH(2) . */
11810 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11811 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11812 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11813 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11814 if (0 == opB)
11815 record_buf_mem[0] = 4; /* STR (2). */
11816 else if (2 == opB)
11817 record_buf_mem[0] = 1; /* STRB (2). */
11818 else if (1 == opB)
11819 record_buf_mem[0] = 2; /* STRH (2). */
11820 record_buf_mem[1] = u_regval[0] + u_regval[1];
11821 thumb_insn_r->mem_rec_count = 1;
11822 }
11823 }
11824 else if (bit (thumb_insn_r->arm_insn, 11))
11825 {
11826 /* Handle load from literal pool. */
11827 /* LDR(3). */
11828 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11829 record_buf[0] = reg_src1;
11830 thumb_insn_r->reg_rec_count = 1;
11831 }
11832 else if (opcode1)
11833 {
11834 /* Special data instructions and branch and exchange */
11835 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11836 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11837 if ((3 == opcode2) && (!opcode3))
11838 {
11839 /* Branch with exchange. */
11840 record_buf[0] = ARM_PS_REGNUM;
11841 thumb_insn_r->reg_rec_count = 1;
11842 }
11843 else
11844 {
11845 /* Format 8; special data processing insns. */
11846 record_buf[0] = ARM_PS_REGNUM;
11847 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11848 | bits (thumb_insn_r->arm_insn, 0, 2));
11849 thumb_insn_r->reg_rec_count = 2;
11850 }
11851 }
11852 else
11853 {
11854 /* Format 5; data processing insns. */
11855 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11856 if (bit (thumb_insn_r->arm_insn, 7))
11857 {
11858 reg_src1 = reg_src1 + 8;
11859 }
11860 record_buf[0] = ARM_PS_REGNUM;
11861 record_buf[1] = reg_src1;
11862 thumb_insn_r->reg_rec_count = 2;
11863 }
11864
11865 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11866 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11867 record_buf_mem);
11868
11869 return 0;
11870 }
11871
11872 /* Handling opcode 001 insns. */
11873
11874 static int
11875 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11876 {
11877 struct regcache *reg_cache = thumb_insn_r->regcache;
11878 uint32_t record_buf[8], record_buf_mem[8];
11879
11880 uint32_t reg_src1 = 0;
11881 uint32_t opcode = 0, immed_5 = 0;
11882
11883 ULONGEST u_regval = 0;
11884
11885 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11886
11887 if (opcode)
11888 {
11889 /* LDR(1). */
11890 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11891 record_buf[0] = reg_src1;
11892 thumb_insn_r->reg_rec_count = 1;
11893 }
11894 else
11895 {
11896 /* STR(1). */
11897 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11898 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11899 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11900 record_buf_mem[0] = 4;
11901 record_buf_mem[1] = u_regval + (immed_5 * 4);
11902 thumb_insn_r->mem_rec_count = 1;
11903 }
11904
11905 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11906 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11907 record_buf_mem);
11908
11909 return 0;
11910 }
11911
11912 /* Handling opcode 100 insns. */
11913
11914 static int
11915 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11916 {
11917 struct regcache *reg_cache = thumb_insn_r->regcache;
11918 uint32_t record_buf[8], record_buf_mem[8];
11919
11920 uint32_t reg_src1 = 0;
11921 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11922
11923 ULONGEST u_regval = 0;
11924
11925 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11926
11927 if (3 == opcode)
11928 {
11929 /* LDR(4). */
11930 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11931 record_buf[0] = reg_src1;
11932 thumb_insn_r->reg_rec_count = 1;
11933 }
11934 else if (1 == opcode)
11935 {
11936 /* LDRH(1). */
11937 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11938 record_buf[0] = reg_src1;
11939 thumb_insn_r->reg_rec_count = 1;
11940 }
11941 else if (2 == opcode)
11942 {
11943 /* STR(3). */
11944 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11945 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11946 record_buf_mem[0] = 4;
11947 record_buf_mem[1] = u_regval + (immed_8 * 4);
11948 thumb_insn_r->mem_rec_count = 1;
11949 }
11950 else if (0 == opcode)
11951 {
11952 /* STRH(1). */
11953 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11954 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11955 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11956 record_buf_mem[0] = 2;
11957 record_buf_mem[1] = u_regval + (immed_5 * 2);
11958 thumb_insn_r->mem_rec_count = 1;
11959 }
11960
11961 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11962 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11963 record_buf_mem);
11964
11965 return 0;
11966 }
11967
/* Handling opcode 101 insns.

   Covers ADR / ADD (SP plus immediate) and the "miscellaneous 16-bit
   instructions" group (SP adjust, extend, PUSH/POP, REV, CBZ/CBNZ,
   BKPT, IT/hints).  Returns 0 on success, -1 when the instruction
   cannot be recorded.  */

static int
thumb_record_misc (insn_decode_record *thumb_insn_r)
{
  struct regcache *reg_cache = thumb_insn_r->regcache;

  uint32_t opcode = 0;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];
  uint32_t reg_src1;

  ULONGEST u_regval = 0;

  opcode = bits (thumb_insn_r->arm_insn, 11, 12);

  if (opcode == 0 || opcode == 1)
    {
      /* ADR and ADD (SP plus immediate) */

      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      record_buf[0] = reg_src1;
      thumb_insn_r->reg_rec_count = 1;
    }
  else
    {
      /* Miscellaneous 16-bit instructions */
      uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);

      switch (opcode2)
	{
	case 6:
	  /* SETEND and CPS */
	  break;
	case 0:
	  /* ADD/SUB (SP plus immediate): only SP changes.
	     NOTE(review): REG_SRC1 is assigned but never used in this
	     case.  */
	  reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
	  record_buf[0] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 1: /* fall through  */
	case 3: /* fall through  */
	case 9: /* fall through  */
	case 11:
	  /* CBNZ, CBZ */
	  break;
	case 2:
	  /* SXTH, SXTB, UXTH, UXTB */
	  record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 4: /* fall through  */
	case 5:
	  /* PUSH.  Count the registers in the list; the stores run
	     from SP - 4 * (M + count) upwards, where M (bit 8) says
	     whether LR is pushed as well.  */
	  register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
	  regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;
	      register_bits = register_bits >> 1;
	    }
	  start_address = u_regval -  \
	    (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
	  thumb_insn_r->mem_rec_count = register_count;
	  while (register_count)
	    {
	      record_buf_mem[(register_count * 2) - 1] = start_address;
	      record_buf_mem[(register_count * 2) - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  /* SP itself is written back.  */
	  record_buf[0] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 10:
	  /* REV, REV16, REVSH */
	  record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 12: /* fall through  */
	case 13:
	  /* POP.  Every register named in the list is loaded, plus
	     CPSR and the written-back SP.  */
	  register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;
	      register_bits = register_bits >> 1;
	      register_count++;
	    }
	  record_buf[index++] = ARM_PS_REGNUM;
	  record_buf[index++] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = index;
	  break;
	case 0xe:
	  /* BKPT insn.  */
	  /* Handle enhanced software breakpoint insn, BKPT.  */
	  /* CPSR is changed to be executed in ARM state,  disabling normal
	     interrupts, entering abort mode.  */
	  /* According to high vector configuration PC is set.  */
	  /* User hits breakpoint and type reverse, in that case, we need to go back with 
	     previous CPSR and Program Counter.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  thumb_insn_r->reg_rec_count = 2;
	  /* We need to save SPSR value, which is not yet done.  */
	  printf_unfiltered (_("Process record does not support instruction "
			       "0x%0x at address %s.\n"),
			     thumb_insn_r->arm_insn,
			     paddress (thumb_insn_r->gdbarch,
				       thumb_insn_r->this_addr));
	  return -1;

	case 0xf:
	  /* If-Then, and hints */
	  break;
	default:
	  return -1;
	};
    }

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return 0;
}
12097
/* Handling opcode 110 insns.

   Covers LDMIA, STMIA and the Thumb SWI (system call) encoding.
   Returns 0 on success, -1 on failure (no syscall record support or
   syscall recorder error).  */

static int
thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
  struct regcache *reg_cache = thumb_insn_r->regcache;

  /* NOTE(review): RET is unsigned but may receive -1 from the syscall
     recorder; the value round-trips back through the int return type.  */
  uint32_t ret = 0; /* function return value: -1:record failure ;  0:success  */
  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
  opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);

  if (1 == opcode2)
    {

      /* LDMIA.  Every register in the list is loaded; the base
	 register Rn is recorded too (write-back).  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    record_buf[index++] = register_count;
	  register_bits = register_bits >> 1;
	  register_count++;
	}
      record_buf[index++] = reg_src1;
      thumb_insn_r->reg_rec_count = index;
    }
  else if (0 == opcode2)
    {
      /* It handles both STMIA.  Stores run upwards from the value of
	 Rn, one word per listed register.  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    register_count++;
	  register_bits = register_bits >> 1;
	}
      start_address = u_regval;
      thumb_insn_r->mem_rec_count = register_count;
      while (register_count)
	{
	  record_buf_mem[(register_count * 2) - 1] = start_address;
	  record_buf_mem[(register_count * 2) - 2] = 4;
	  start_address = start_address + 4;
	  register_count--;
	}
    }
  else if (0x1F == opcode1)
    {
      /* Handle arm syscall insn.  The Thumb EABI passes the syscall
	 number in r7.  */
      if (tdep->arm_syscall_record != NULL)
	{
	  regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
	  ret = tdep->arm_syscall_record (reg_cache, u_regval);
	}
      else
	{
	  printf_unfiltered (_("no syscall record support\n"));
	  return -1;
	}
    }

  /* B (1), conditional branch is automatically taken care in process_record,
    as PC is saved there.  */

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return ret;
}
12181
12182 /* Handling opcode 111 insns. */
12183
12184 static int
12185 thumb_record_branch (insn_decode_record *thumb_insn_r)
12186 {
12187 uint32_t record_buf[8];
12188 uint32_t bits_h = 0;
12189
12190 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12191
12192 if (2 == bits_h || 3 == bits_h)
12193 {
12194 /* BL */
12195 record_buf[0] = ARM_LR_REGNUM;
12196 thumb_insn_r->reg_rec_count = 1;
12197 }
12198 else if (1 == bits_h)
12199 {
12200 /* BLX(1). */
12201 record_buf[0] = ARM_PS_REGNUM;
12202 record_buf[1] = ARM_LR_REGNUM;
12203 thumb_insn_r->reg_rec_count = 2;
12204 }
12205
12206 /* B(2) is automatically taken care in process_record, as PC is
12207 saved there. */
12208
12209 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12210
12211 return 0;
12212 }
12213
/* Handler for thumb2 load/store multiple instructions.

   Covers RFE/SRS (op 0/3) and LDM/STM in both increment-after and
   decrement-before forms (op 1/2).  Returns ARM_RECORD_SUCCESS, or
   the result of arm_record_unsupported_insn for SRS.  */

static int
thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, op;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  op = bits (thumb2_insn_r->arm_insn, 23, 24);

  if (0 == op || 3 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle RFE instruction.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* Handle SRS instruction after reading banked SP.  */
	  return arm_record_unsupported_insn (thumb2_insn_r);
	}
    }
  else if (1 == op || 2 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions.
	     Every register in the 16-bit list is loaded.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;

	      register_count++;
	      register_bits = register_bits >> 1;
	    }
	  /* The base register (write-back) and the flags change too.  */
	  record_buf[index++] = reg_rn;
	  record_buf[index++] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = index;
	}
      else
	{
	  /* Handle STM/STMIA/STMEA and STMDB/STMFD.  First count the
	     registers in the list.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;

	      register_bits = register_bits >> 1;
	    }

	  if (1 == op)
	    {
	      /* Start address for STM/STMIA/STMEA (increment after):
		 stores begin at the base register value.  */
	      start_address = u_regval;
	    }
	  else if (2 == op)
	    {
	      /* Start address for STMDB/STMFD (decrement before):
		 stores begin at Rn - 4 * (number of registers).  */
	      start_address = u_regval - register_count * 4;
	    }

	  thumb2_insn_r->mem_rec_count = register_count;
	  while (register_count)
	    {
	      record_buf_mem[register_count * 2 - 1] = start_address;
	      record_buf_mem[register_count * 2 - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  /* Base register write-back and flags.  */
	  record_buf[0] = reg_rn;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}
    }

  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  return ARM_RECORD_SUCCESS;
}
12307
/* Handler for thumb2 load/store (dual/exclusive) and table branch
   instructions.

   Records, into THUMB2_INSN_R, the registers and memory locations that
   the instruction will modify, so that process record/replay can undo
   it.  Returns ARM_RECORD_SUCCESS.  */

static int
thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rd, reg_rn, offset_imm;
  uint32_t reg_dest1, reg_dest2;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2, op3;

  ULONGEST u_regval[2];

  /* Sub-opcode fields of the T32 load/store dual/exclusive encoding
     group (bits 23-24, 20-21 and 4-7 of the 32-bit instruction).  */
  op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
  op3 = bits (thumb2_insn_r->arm_insn, 4, 7);

  if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* Load forms: only registers change; no memory needs recording.  */

      /* op1 == 1 && op2 == 1 && op3 in {0,1} appears to be the table
	 branch (TBB/TBH) encoding, which writes no Rt — everything else
	 records the first destination (bits 12-15).  NOTE(review):
	 confirm against the ARM ARM T32 encoding tables.  */
      if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
	{
	  reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
	  record_buf[0] = reg_dest1;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}

      /* Dual/paired loads (e.g. LDRD, LDREXD) also write a second
	 destination in bits 8-11.  */
      if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
	{
	  reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
	  record_buf[2] = reg_dest2;
	  thumb2_insn_r->reg_rec_count = 3;
	}
    }
  else
    {
      /* Store forms: compute the target address from the base register
	 Rn so the overwritten memory can be recorded.  */
      reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

      if (0 == op1 && 0 == op2)
	{
	  /* Handle STREX.  Address is Rn + (imm8 << 2); the status
	     result register Rd (bits 0-3) is also written.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
	  address = u_regval[0] + (offset_imm * 4);
	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = address;
	  thumb2_insn_r->mem_rec_count = 1;
	  reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
	  record_buf[0] = reg_rd;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else if (1 == op1 && 0 == op2)
	{
	  /* STREXB/STREXH/STREXD, distinguished by op3.  All write the
	     status register Rd (bits 0-3) and store at [Rn].  */
	  reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
	  record_buf[0] = reg_rd;
	  thumb2_insn_r->reg_rec_count = 1;
	  address = u_regval[0];
	  record_buf_mem[1] = address;

	  if (4 == op3)
	    {
	      /* Handle STREXB: one byte stored.  */
	      record_buf_mem[0] = 1;
	      thumb2_insn_r->mem_rec_count = 1;
	    }
	  else if (5 == op3)
	    {
	      /* Handle STREXH: one half-word stored.  */
	      record_buf_mem[0] = 2 ;
	      thumb2_insn_r->mem_rec_count = 1;
	    }
	  else if (7 == op3)
	    {
	      /* Handle STREXD: two consecutive words stored at
		 [Rn] and [Rn + 4].  */
	      address = u_regval[0];
	      record_buf_mem[0] = 4;
	      record_buf_mem[2] = 4;
	      record_buf_mem[3] = address + 4;
	      thumb2_insn_r->mem_rec_count = 2;
	    }
	}
      else
	{
	  /* STRD (immediate): two words at the indexed address.  Bit 24
	     selects pre/post indexing, bit 23 the offset sign.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);

	  if (bit (thumb2_insn_r->arm_insn, 24))
	    {
	      if (bit (thumb2_insn_r->arm_insn, 23))
		offset_addr = u_regval[0] + (offset_imm * 4);
	      else
		offset_addr = u_regval[0] - (offset_imm * 4);

	      address = offset_addr;
	    }
	  else
	    address = u_regval[0];

	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = address;
	  record_buf_mem[2] = 4;
	  record_buf_mem[3] = address + 4;
	  thumb2_insn_r->mem_rec_count = 2;
	  /* Base register may be written back.  */
	  record_buf[0] = reg_rn;
	  thumb2_insn_r->reg_rec_count = 1;
	}
    }

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12424
12425 /* Handler for thumb2 data processing (shift register and modified immediate)
12426 instructions. */
12427
12428 static int
12429 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12430 {
12431 uint32_t reg_rd, op;
12432 uint32_t record_buf[8];
12433
12434 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12435 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12436
12437 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12438 {
12439 record_buf[0] = ARM_PS_REGNUM;
12440 thumb2_insn_r->reg_rec_count = 1;
12441 }
12442 else
12443 {
12444 record_buf[0] = reg_rd;
12445 record_buf[1] = ARM_PS_REGNUM;
12446 thumb2_insn_r->reg_rec_count = 2;
12447 }
12448
12449 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12450 record_buf);
12451 return ARM_RECORD_SUCCESS;
12452 }
12453
12454 /* Generic handler for thumb2 instructions which effect destination and PS
12455 registers. */
12456
12457 static int
12458 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12459 {
12460 uint32_t reg_rd;
12461 uint32_t record_buf[8];
12462
12463 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12464
12465 record_buf[0] = reg_rd;
12466 record_buf[1] = ARM_PS_REGNUM;
12467 thumb2_insn_r->reg_rec_count = 2;
12468
12469 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12470 record_buf);
12471 return ARM_RECORD_SUCCESS;
12472 }
12473
12474 /* Handler for thumb2 branch and miscellaneous control instructions. */
12475
12476 static int
12477 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12478 {
12479 uint32_t op, op1, op2;
12480 uint32_t record_buf[8];
12481
12482 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12483 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12484 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12485
12486 /* Handle MSR insn. */
12487 if (!(op1 & 0x2) && 0x38 == op)
12488 {
12489 if (!(op2 & 0x3))
12490 {
12491 /* CPSR is going to be changed. */
12492 record_buf[0] = ARM_PS_REGNUM;
12493 thumb2_insn_r->reg_rec_count = 1;
12494 }
12495 else
12496 {
12497 arm_record_unsupported_insn(thumb2_insn_r);
12498 return -1;
12499 }
12500 }
12501 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12502 {
12503 /* BLX. */
12504 record_buf[0] = ARM_PS_REGNUM;
12505 record_buf[1] = ARM_LR_REGNUM;
12506 thumb2_insn_r->reg_rec_count = 2;
12507 }
12508
12509 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12510 record_buf);
12511 return ARM_RECORD_SUCCESS;
12512 }
12513
/* Handler for thumb2 store single data item instructions.

   Computes the effective store address from the base register and the
   encoding's addressing mode, then records the overwritten memory
   (1, 2 or 4 bytes depending on the opcode) and the base register,
   which may be written back.  Returns ARM_RECORD_SUCCESS.  */

static int
thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2;

  ULONGEST u_regval[2];

  /* op1 (bits 21-23) selects the store size/form; op2 (bits 6-11)
     disambiguates register vs. immediate addressing below.  */
  op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
  op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

  if (bit (thumb2_insn_r->arm_insn, 23))
    {
      /* T2 encoding.  Positive 12-bit immediate offset: Rn + imm12.
	 NOTE(review): the T2/T3 labels here and below look swapped
	 relative to the ARM ARM encoding names — confirm.  */
      offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
      offset_addr = u_regval[0] + offset_imm;
      address = offset_addr;
    }
  else
    {
      /* T3 encoding.  */
      if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
	{
	  /* Handle STRB (register).  Address is Rn + (Rm << imm2).  */
	  reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
	  regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
	  shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
	  offset_addr = u_regval[1] << shift_imm;
	  address = u_regval[0] + offset_addr;
	}
      else
	{
	  /* Immediate 8-bit offset form.  Bit 10 selects indexed
	     addressing; bit 9 selects add vs. subtract.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
	  if (bit (thumb2_insn_r->arm_insn, 10))
	    {
	      if (bit (thumb2_insn_r->arm_insn, 9))
		offset_addr = u_regval[0] + offset_imm;
	      else
		offset_addr = u_regval[0] - offset_imm;

	      address = offset_addr;
	    }
	  else
	    /* Post-indexed: store at the unmodified base address.  */
	    address = u_regval[0];
	}
    }

  /* Store size is determined by op1; other op1 values cannot occur for
     the store encodings dispatched here.  */
  switch (op1)
    {
      /* Store byte instructions.  */
      case 4:
      case 0:
	record_buf_mem[0] = 1;
	break;
      /* Store half word instructions.  */
      case 1:
      case 5:
	record_buf_mem[0] = 2;
	break;
      /* Store word instructions.  */
      case 2:
      case 6:
	record_buf_mem[0] = 4;
	break;

      default:
	gdb_assert_not_reached ("no decoding pattern found");
	break;
    }

  record_buf_mem[1] = address;
  thumb2_insn_r->mem_rec_count = 1;
  /* Record Rn since indexed forms write it back.  */
  record_buf[0] = reg_rn;
  thumb2_insn_r->reg_rec_count = 1;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12603
12604 /* Handler for thumb2 load memory hints instructions. */
12605
12606 static int
12607 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12608 {
12609 uint32_t record_buf[8];
12610 uint32_t reg_rt, reg_rn;
12611
12612 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12613 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12614
12615 if (ARM_PC_REGNUM != reg_rt)
12616 {
12617 record_buf[0] = reg_rt;
12618 record_buf[1] = reg_rn;
12619 record_buf[2] = ARM_PS_REGNUM;
12620 thumb2_insn_r->reg_rec_count = 3;
12621
12622 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12623 record_buf);
12624 return ARM_RECORD_SUCCESS;
12625 }
12626
12627 return ARM_RECORD_FAILURE;
12628 }
12629
12630 /* Handler for thumb2 load word instructions. */
12631
12632 static int
12633 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12634 {
12635 uint32_t record_buf[8];
12636
12637 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12638 record_buf[1] = ARM_PS_REGNUM;
12639 thumb2_insn_r->reg_rec_count = 2;
12640
12641 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12642 record_buf);
12643 return ARM_RECORD_SUCCESS;
12644 }
12645
12646 /* Handler for thumb2 long multiply, long multiply accumulate, and
12647 divide instructions. */
12648
12649 static int
12650 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12651 {
12652 uint32_t opcode1 = 0, opcode2 = 0;
12653 uint32_t record_buf[8];
12654
12655 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12656 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12657
12658 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12659 {
12660 /* Handle SMULL, UMULL, SMULAL. */
12661 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12662 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12663 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12664 record_buf[2] = ARM_PS_REGNUM;
12665 thumb2_insn_r->reg_rec_count = 3;
12666 }
12667 else if (1 == opcode1 || 3 == opcode2)
12668 {
12669 /* Handle SDIV and UDIV. */
12670 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12671 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12672 record_buf[2] = ARM_PS_REGNUM;
12673 thumb2_insn_r->reg_rec_count = 3;
12674 }
12675 else
12676 return ARM_RECORD_FAILURE;
12677
12678 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12679 record_buf);
12680 return ARM_RECORD_SUCCESS;
12681 }
12682
12683 /* Record handler for thumb32 coprocessor instructions. */
12684
12685 static int
12686 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12687 {
12688 if (bit (thumb2_insn_r->arm_insn, 25))
12689 return arm_record_coproc_data_proc (thumb2_insn_r);
12690 else
12691 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12692 }
12693
12694 /* Record handler for advance SIMD structure load/store instructions. */
12695
12696 static int
12697 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12698 {
12699 struct regcache *reg_cache = thumb2_insn_r->regcache;
12700 uint32_t l_bit, a_bit, b_bits;
12701 uint32_t record_buf[128], record_buf_mem[128];
12702 uint32_t reg_rn, reg_vd, address, f_elem;
12703 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12704 uint8_t f_ebytes;
12705
12706 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12707 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12708 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12709 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12710 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12711 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12712 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12713 f_elem = 8 / f_ebytes;
12714
12715 if (!l_bit)
12716 {
12717 ULONGEST u_regval = 0;
12718 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12719 address = u_regval;
12720
12721 if (!a_bit)
12722 {
12723 /* Handle VST1. */
12724 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12725 {
12726 if (b_bits == 0x07)
12727 bf_regs = 1;
12728 else if (b_bits == 0x0a)
12729 bf_regs = 2;
12730 else if (b_bits == 0x06)
12731 bf_regs = 3;
12732 else if (b_bits == 0x02)
12733 bf_regs = 4;
12734 else
12735 bf_regs = 0;
12736
12737 for (index_r = 0; index_r < bf_regs; index_r++)
12738 {
12739 for (index_e = 0; index_e < f_elem; index_e++)
12740 {
12741 record_buf_mem[index_m++] = f_ebytes;
12742 record_buf_mem[index_m++] = address;
12743 address = address + f_ebytes;
12744 thumb2_insn_r->mem_rec_count += 1;
12745 }
12746 }
12747 }
12748 /* Handle VST2. */
12749 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12750 {
12751 if (b_bits == 0x09 || b_bits == 0x08)
12752 bf_regs = 1;
12753 else if (b_bits == 0x03)
12754 bf_regs = 2;
12755 else
12756 bf_regs = 0;
12757
12758 for (index_r = 0; index_r < bf_regs; index_r++)
12759 for (index_e = 0; index_e < f_elem; index_e++)
12760 {
12761 for (loop_t = 0; loop_t < 2; loop_t++)
12762 {
12763 record_buf_mem[index_m++] = f_ebytes;
12764 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12765 thumb2_insn_r->mem_rec_count += 1;
12766 }
12767 address = address + (2 * f_ebytes);
12768 }
12769 }
12770 /* Handle VST3. */
12771 else if ((b_bits & 0x0e) == 0x04)
12772 {
12773 for (index_e = 0; index_e < f_elem; index_e++)
12774 {
12775 for (loop_t = 0; loop_t < 3; loop_t++)
12776 {
12777 record_buf_mem[index_m++] = f_ebytes;
12778 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12779 thumb2_insn_r->mem_rec_count += 1;
12780 }
12781 address = address + (3 * f_ebytes);
12782 }
12783 }
12784 /* Handle VST4. */
12785 else if (!(b_bits & 0x0e))
12786 {
12787 for (index_e = 0; index_e < f_elem; index_e++)
12788 {
12789 for (loop_t = 0; loop_t < 4; loop_t++)
12790 {
12791 record_buf_mem[index_m++] = f_ebytes;
12792 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12793 thumb2_insn_r->mem_rec_count += 1;
12794 }
12795 address = address + (4 * f_ebytes);
12796 }
12797 }
12798 }
12799 else
12800 {
12801 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12802
12803 if (bft_size == 0x00)
12804 f_ebytes = 1;
12805 else if (bft_size == 0x01)
12806 f_ebytes = 2;
12807 else if (bft_size == 0x02)
12808 f_ebytes = 4;
12809 else
12810 f_ebytes = 0;
12811
12812 /* Handle VST1. */
12813 if (!(b_bits & 0x0b) || b_bits == 0x08)
12814 thumb2_insn_r->mem_rec_count = 1;
12815 /* Handle VST2. */
12816 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12817 thumb2_insn_r->mem_rec_count = 2;
12818 /* Handle VST3. */
12819 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12820 thumb2_insn_r->mem_rec_count = 3;
12821 /* Handle VST4. */
12822 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12823 thumb2_insn_r->mem_rec_count = 4;
12824
12825 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12826 {
12827 record_buf_mem[index_m] = f_ebytes;
12828 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12829 }
12830 }
12831 }
12832 else
12833 {
12834 if (!a_bit)
12835 {
12836 /* Handle VLD1. */
12837 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12838 thumb2_insn_r->reg_rec_count = 1;
12839 /* Handle VLD2. */
12840 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12841 thumb2_insn_r->reg_rec_count = 2;
12842 /* Handle VLD3. */
12843 else if ((b_bits & 0x0e) == 0x04)
12844 thumb2_insn_r->reg_rec_count = 3;
12845 /* Handle VLD4. */
12846 else if (!(b_bits & 0x0e))
12847 thumb2_insn_r->reg_rec_count = 4;
12848 }
12849 else
12850 {
12851 /* Handle VLD1. */
12852 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12853 thumb2_insn_r->reg_rec_count = 1;
12854 /* Handle VLD2. */
12855 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12856 thumb2_insn_r->reg_rec_count = 2;
12857 /* Handle VLD3. */
12858 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12859 thumb2_insn_r->reg_rec_count = 3;
12860 /* Handle VLD4. */
12861 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12862 thumb2_insn_r->reg_rec_count = 4;
12863
12864 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12865 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12866 }
12867 }
12868
12869 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12870 {
12871 record_buf[index_r] = reg_rn;
12872 thumb2_insn_r->reg_rec_count += 1;
12873 }
12874
12875 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12876 record_buf);
12877 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12878 record_buf_mem);
12879 return 0;
12880 }
12881
/* Decodes thumb2 instruction type and invokes its record handler.

   Dispatch follows the top-level T32 encoding split on op1 (bits 27-28),
   op2 (bits 20-26) and op (bit 15); each leaf forwards to the matching
   thumb2_record_* handler.  Returns the handler's result, or -1 when no
   pattern matches.  NOTE(review): the return type is unsigned int yet
   -1 is returned and callers store it in a uint32_t before comparing
   against ARM_RECORD_SUCCESS — works, but int would be cleaner.  */

static unsigned int
thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
{
  uint32_t op, op1, op2;

  op = bit (thumb2_insn_r->arm_insn, 15);
  op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 26);

  if (op1 == 0x01)
    {
      if (!(op2 & 0x64 ))
	{
	  /* Load/store multiple instruction.  */
	  return thumb2_record_ld_st_multiple (thumb2_insn_r);
	}
      else if ((op2 & 0x64) == 0x4)
	{
	  /* Load/store (dual/exclusive) and table branch instruction.  */
	  return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
	}
      else if ((op2 & 0x60) == 0x20)
	{
	  /* Data-processing (shifted register).  */
	  return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
	}
      else if (op2 & 0x40)
	{
	  /* Co-processor instructions.  */
	  return thumb2_record_coproc_insn (thumb2_insn_r);
	}
    }
  else if (op1 == 0x02)
    {
      if (op)
	{
	  /* Branches and miscellaneous control instructions.  */
	  return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
	}
      else if (op2 & 0x20)
	{
	  /* Data-processing (plain binary immediate) instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else
	{
	  /* Data-processing (modified immediate).  */
	  return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
	}
    }
  else if (op1 == 0x03)
    {
      if (!(op2 & 0x71 ))
	{
	  /* Store single data item.  */
	  return thumb2_record_str_single_data (thumb2_insn_r);
	}
      else if (!((op2 & 0x71) ^ 0x10))
	{
	  /* Advanced SIMD or structure load/store instructions.  */
	  return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x01))
	{
	  /* Load byte, memory hints instruction.  */
	  return thumb2_record_ld_mem_hints (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x03))
	{
	  /* Load halfword, memory hints instruction.  */
	  return thumb2_record_ld_mem_hints (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x05))
	{
	  /* Load word instruction.  */
	  return thumb2_record_ld_word (thumb2_insn_r);
	}
      else if (!((op2 & 0x70) ^ 0x20))
	{
	  /* Data-processing (register) instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else if (!((op2 & 0x78) ^ 0x30))
	{
	  /* Multiply, multiply accumulate, abs diff instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else if (!((op2 & 0x78) ^ 0x38))
	{
	  /* Long multiply, long multiply accumulate, and divide.  */
	  return thumb2_record_lmul_lmla_div (thumb2_insn_r);
	}
      else if (op2 & 0x40)
	{
	  /* Co-processor instructions.  */
	  return thumb2_record_coproc_insn (thumb2_insn_r);
	}
    }

  /* No matching encoding: signal failure to the caller.  */
  return -1;
}
12985
12986 namespace {
12987 /* Abstract memory reader. */
12988
12989 class abstract_memory_reader
12990 {
12991 public:
12992 /* Read LEN bytes of target memory at address MEMADDR, placing the
12993 results in GDB's memory at BUF. Return true on success. */
12994
12995 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
12996 };
12997
12998 /* Instruction reader from real target. */
12999
13000 class instruction_reader : public abstract_memory_reader
13001 {
13002 public:
13003 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13004 {
13005 if (target_read_memory (memaddr, buf, len))
13006 return false;
13007 else
13008 return true;
13009 }
13010 };
13011
13012 } // namespace
13013
13014 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13015 and positive val on failure. */
13016
13017 static int
13018 extract_arm_insn (abstract_memory_reader& reader,
13019 insn_decode_record *insn_record, uint32_t insn_size)
13020 {
13021 gdb_byte buf[insn_size];
13022
13023 memset (&buf[0], 0, insn_size);
13024
13025 if (!reader.read (insn_record->this_addr, buf, insn_size))
13026 return 1;
13027 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13028 insn_size,
13029 gdbarch_byte_order_for_code (insn_record->gdbarch));
13030 return 0;
13031 }
13032
13033 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13034
13035 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13036 dispatch it. */
13037
13038 static int
13039 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13040 record_type_t record_type, uint32_t insn_size)
13041 {
13042
13043 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
13044 instruction. */
13045 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13046 {
13047 arm_record_data_proc_misc_ld_str, /* 000. */
13048 arm_record_data_proc_imm, /* 001. */
13049 arm_record_ld_st_imm_offset, /* 010. */
13050 arm_record_ld_st_reg_offset, /* 011. */
13051 arm_record_ld_st_multiple, /* 100. */
13052 arm_record_b_bl, /* 101. */
13053 arm_record_asimd_vfp_coproc, /* 110. */
13054 arm_record_coproc_data_proc /* 111. */
13055 };
13056
13057 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
13058 instruction. */
13059 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13060 { \
13061 thumb_record_shift_add_sub, /* 000. */
13062 thumb_record_add_sub_cmp_mov, /* 001. */
13063 thumb_record_ld_st_reg_offset, /* 010. */
13064 thumb_record_ld_st_imm_offset, /* 011. */
13065 thumb_record_ld_st_stack, /* 100. */
13066 thumb_record_misc, /* 101. */
13067 thumb_record_ldm_stm_swi, /* 110. */
13068 thumb_record_branch /* 111. */
13069 };
13070
13071 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13072 uint32_t insn_id = 0;
13073
13074 if (extract_arm_insn (reader, arm_record, insn_size))
13075 {
13076 if (record_debug)
13077 {
13078 printf_unfiltered (_("Process record: error reading memory at "
13079 "addr %s len = %d.\n"),
13080 paddress (arm_record->gdbarch,
13081 arm_record->this_addr), insn_size);
13082 }
13083 return -1;
13084 }
13085 else if (ARM_RECORD == record_type)
13086 {
13087 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13088 insn_id = bits (arm_record->arm_insn, 25, 27);
13089
13090 if (arm_record->cond == 0xf)
13091 ret = arm_record_extension_space (arm_record);
13092 else
13093 {
13094 /* If this insn has fallen into extension space
13095 then we need not decode it anymore. */
13096 ret = arm_handle_insn[insn_id] (arm_record);
13097 }
13098 if (ret != ARM_RECORD_SUCCESS)
13099 {
13100 arm_record_unsupported_insn (arm_record);
13101 ret = -1;
13102 }
13103 }
13104 else if (THUMB_RECORD == record_type)
13105 {
13106 /* As thumb does not have condition codes, we set negative. */
13107 arm_record->cond = -1;
13108 insn_id = bits (arm_record->arm_insn, 13, 15);
13109 ret = thumb_handle_insn[insn_id] (arm_record);
13110 if (ret != ARM_RECORD_SUCCESS)
13111 {
13112 arm_record_unsupported_insn (arm_record);
13113 ret = -1;
13114 }
13115 }
13116 else if (THUMB2_RECORD == record_type)
13117 {
13118 /* As thumb does not have condition codes, we set negative. */
13119 arm_record->cond = -1;
13120
13121 /* Swap first half of 32bit thumb instruction with second half. */
13122 arm_record->arm_insn
13123 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13124
13125 ret = thumb2_record_decode_insn_handler (arm_record);
13126
13127 if (ret != ARM_RECORD_SUCCESS)
13128 {
13129 arm_record_unsupported_insn (arm_record);
13130 ret = -1;
13131 }
13132 }
13133 else
13134 {
13135 /* Throw assertion. */
13136 gdb_assert_not_reached ("not a valid instruction, could not decode");
13137 }
13138
13139 return ret;
13140 }
13141
13142 #if GDB_SELF_TEST
13143 namespace selftests {
13144
13145 /* Provide both 16-bit and 32-bit thumb instructions. */
13146
13147 class instruction_reader_thumb : public abstract_memory_reader
13148 {
13149 public:
13150 template<size_t SIZE>
13151 instruction_reader_thumb (enum bfd_endian endian,
13152 const uint16_t (&insns)[SIZE])
13153 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13154 {}
13155
13156 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13157 {
13158 SELF_CHECK (len == 4 || len == 2);
13159 SELF_CHECK (memaddr % 2 == 0);
13160 SELF_CHECK ((memaddr / 2) < m_insns_size);
13161
13162 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13163 if (len == 4)
13164 {
13165 store_unsigned_integer (&buf[2], 2, m_endian,
13166 m_insns[memaddr / 2 + 1]);
13167 }
13168 return true;
13169 }
13170
13171 private:
13172 enum bfd_endian m_endian;
13173 const uint16_t *m_insns;
13174 size_t m_insns_size;
13175 };
13176
13177 static void
13178 arm_record_test (void)
13179 {
13180 struct gdbarch_info info;
13181 gdbarch_info_init (&info);
13182 info.bfd_arch_info = bfd_scan_arch ("arm");
13183
13184 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13185
13186 SELF_CHECK (gdbarch != NULL);
13187
13188 /* 16-bit Thumb instructions. */
13189 {
13190 insn_decode_record arm_record;
13191
13192 memset (&arm_record, 0, sizeof (insn_decode_record));
13193 arm_record.gdbarch = gdbarch;
13194
13195 static const uint16_t insns[] = {
13196 /* db b2 uxtb r3, r3 */
13197 0xb2db,
13198 /* cd 58 ldr r5, [r1, r3] */
13199 0x58cd,
13200 };
13201
13202 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13203 instruction_reader_thumb reader (endian, insns);
13204 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13205 THUMB_INSN_SIZE_BYTES);
13206
13207 SELF_CHECK (ret == 0);
13208 SELF_CHECK (arm_record.mem_rec_count == 0);
13209 SELF_CHECK (arm_record.reg_rec_count == 1);
13210 SELF_CHECK (arm_record.arm_regs[0] == 3);
13211
13212 arm_record.this_addr += 2;
13213 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13214 THUMB_INSN_SIZE_BYTES);
13215
13216 SELF_CHECK (ret == 0);
13217 SELF_CHECK (arm_record.mem_rec_count == 0);
13218 SELF_CHECK (arm_record.reg_rec_count == 1);
13219 SELF_CHECK (arm_record.arm_regs[0] == 5);
13220 }
13221
13222 /* 32-bit Thumb-2 instructions. */
13223 {
13224 insn_decode_record arm_record;
13225
13226 memset (&arm_record, 0, sizeof (insn_decode_record));
13227 arm_record.gdbarch = gdbarch;
13228
13229 static const uint16_t insns[] = {
13230 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13231 0xee1d, 0x7f70,
13232 };
13233
13234 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13235 instruction_reader_thumb reader (endian, insns);
13236 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13237 THUMB2_INSN_SIZE_BYTES);
13238
13239 SELF_CHECK (ret == 0);
13240 SELF_CHECK (arm_record.mem_rec_count == 0);
13241 SELF_CHECK (arm_record.reg_rec_count == 1);
13242 SELF_CHECK (arm_record.arm_regs[0] == 7);
13243 }
13244 }
13245 } // namespace selftests
13246 #endif /* GDB_SELF_TEST */
13247
13248 /* Cleans up local record registers and memory allocations. */
13249
13250 static void
13251 deallocate_reg_mem (insn_decode_record *record)
13252 {
13253 xfree (record->arm_regs);
13254 xfree (record->arm_mems);
13255 }
13256
13257
/* Parse the current instruction and record the values of the registers and
   memory that will be changed in current instruction to record_arch_list".
   Return -1 if something is wrong.

   This is the gdbarch process_record entry point: it peeks 2 bytes at
   INSN_ADDR to classify the instruction as ARM, Thumb or Thumb-2 (via
   the CPSR T bit and the first half-word), lets decode_insn fill the
   per-insn register/memory lists, and then copies those lists into the
   global record-full architecture list.  */

int
arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
		    CORE_ADDR insn_addr)
{

  uint32_t no_of_rec = 0;
  uint32_t ret = 0;  /* return value: -1:record failure ;  0:success  */
  ULONGEST t_bit = 0, insn_id = 0;

  ULONGEST u_regval = 0;

  insn_decode_record arm_record;

  memset (&arm_record, 0, sizeof (insn_decode_record));
  arm_record.regcache = regcache;
  arm_record.this_addr = insn_addr;
  arm_record.gdbarch = gdbarch;


  if (record_debug > 1)
    {
      fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
			  "addr = %s\n",
      paddress (gdbarch, arm_record.this_addr));
    }

  /* Read only the first half-word here; decode_insn re-reads the full
     instruction once its size is known.  */
  instruction_reader reader;
  if (extract_arm_insn (reader, &arm_record, 2))
    {
      if (record_debug)
	{
	  printf_unfiltered (_("Process record: error reading memory at "
			       "addr %s len = %d.\n"),
			     paddress (arm_record.gdbarch,
				       arm_record.this_addr), 2);
	}
      return -1;
    }

  /* Check the insn, whether it is thumb or arm one.  */

  t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
  regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);


  if (!(u_regval & t_bit))
    {
      /* We are decoding arm insn.  */
      ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
    }
  else
    {
      /* Thumb state: the top five bits of the first half-word decide
	 whether this is a 32-bit Thumb-2 encoding.  */
      insn_id = bits (arm_record.arm_insn, 11, 15);
      /* is it thumb2 insn?  */
      if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
	{
	  ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
			     THUMB2_INSN_SIZE_BYTES);
	}
      else
	{
	  /* We are decoding thumb insn.  */
	  ret = decode_insn (reader, &arm_record, THUMB_RECORD,
			     THUMB_INSN_SIZE_BYTES);
	}
    }

  if (0 == ret)
    {
      /* Record registers.  The PC is always recorded since every
	 instruction advances it.  */
      record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
      if (arm_record.arm_regs)
	{
	  for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
	    {
	      if (record_full_arch_list_add_reg
		  (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
		ret = -1;
	    }
	}
      /* Record memories.  Each entry is an (addr, len) pair filled in
	 by the per-insn handler.  */
      if (arm_record.arm_mems)
	{
	  for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
	    {
	      if (record_full_arch_list_add_mem
		  ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
		   arm_record.arm_mems[no_of_rec].len))
		ret = -1;
	    }
	}

      if (record_full_arch_list_add_end ())
	ret = -1;
    }


  deallocate_reg_mem (&arm_record);

  return ret;
}
13363
13364 /* See arm-tdep.h. */
13365
13366 const target_desc *
13367 arm_read_description (arm_fp_type fp_type)
13368 {
13369 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13370
13371 if (tdesc == nullptr)
13372 {
13373 tdesc = arm_create_target_description (fp_type);
13374 tdesc_arm_list[fp_type] = tdesc;
13375 }
13376
13377 return tdesc;
13378 }
13379
13380 /* See arm-tdep.h. */
13381
13382 const target_desc *
13383 arm_read_mprofile_description (arm_m_profile_type m_type)
13384 {
13385 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13386
13387 if (tdesc == nullptr)
13388 {
13389 tdesc = arm_create_mprofile_target_description (m_type);
13390 tdesc_arm_mprofile_list[m_type] = tdesc;
13391 }
13392
13393 return tdesc;
13394 }