/* (Extraction artifact removed: gitweb viewer header from the
   git.ipfire.org mirror listing of gdb/arm-tdep.c.)  */
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2019 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2-frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48
49 #include "arch/arm.h"
50 #include "arch/arm-get-next-pcs.h"
51 #include "arm-tdep.h"
52 #include "gdb/sim-arm.h"
53
54 #include "elf-bfd.h"
55 #include "coff/internal.h"
56 #include "elf/arm.h"
57
58 #include "gdbsupport/vec.h"
59
60 #include "record.h"
61 #include "record-full.h"
62 #include <algorithm>
63
64 #if GDB_SELF_TEST
65 #include "gdbsupport/selftest.h"
66 #include "aarch32-tdep.h"
67 #endif
68
/* When non-zero, print arm-tdep debug output (used below, e.g. by the
   prologue scanners).  NOTE(review): the command that sets this is not
   visible in this chunk — presumably "set debug arm"; confirm against
   _initialize_arm_tdep.  */
static int arm_debug;

/* Macros for setting and testing a bit in a minimal symbol that marks
   it as Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym)
83
/* An ARM ELF mapping symbol ($a, $d, $t), marking where a section
   switches between ARM code, Thumb code and data.  */

struct arm_mapping_symbol
{
  /* Symbol value, relative to the start of the section it belongs to
     (see the lookup in arm_find_mapping_symbol).  */
  bfd_vma value;

  /* Mapping symbol type character; 't' marks Thumb code (see
     arm_pc_is_thumb).  */
  char type;

  /* Order mapping symbols by address, so that a vector of them can be
     sorted and then binary-searched with std::lower_bound.  */
  bool operator< (const arm_mapping_symbol &other) const
  { return this->value < other.value; }
};

/* All the mapping symbols of one BFD section.  */
typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
94
/* Per-objfile cache of mapping-symbol information, registered under
   arm_objfile_data_key.  */

struct arm_per_objfile
{
  /* NUM_SECTIONS is the number of BFD sections in the objfile; one
     (initially empty and unsorted) mapping-symbol vector is allocated
     per section.  */
  explicit arm_per_objfile (size_t num_sections)
  : section_maps (new arm_mapping_symbol_vec[num_sections]),
    section_maps_sorted (new bool[num_sections] ())
  {}

  DISABLE_COPY_AND_ASSIGN (arm_per_objfile);

  /* Information about mapping symbols ($a, $d, $t) in the objfile.

     The format is an array of vectors of arm_mapping_symbols, there is one
     vector for each section of the objfile (the array is index by BFD section
     index).

     For each section, the vector of arm_mapping_symbol is sorted by
     symbol value (address).  */
  std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;

  /* For each corresponding element of section_maps above, is this vector
     sorted.  Sorting is deferred until first lookup (see
     arm_find_mapping_symbol).  */
  std::unique_ptr<bool[]> section_maps_sorted;
};
118
119 /* Per-objfile data used for mapping symbols. */
120 static objfile_key<arm_per_objfile> arm_objfile_data_key;
121
122 /* The list of available "set arm ..." and "show arm ..." commands. */
123 static struct cmd_list_element *setarmcmdlist = NULL;
124 static struct cmd_list_element *showarmcmdlist = NULL;
125
126 /* The type of floating-point to use. Keep this in sync with enum
127 arm_float_model, and the help string in _initialize_arm_tdep. */
128 static const char *const fp_model_strings[] =
129 {
130 "auto",
131 "softfpa",
132 "fpa",
133 "softvfp",
134 "vfp",
135 NULL
136 };
137
138 /* A variable that can be configured by the user. */
139 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
140 static const char *current_fp_model = "auto";
141
142 /* The ABI to use. Keep this in sync with arm_abi_kind. */
143 static const char *const arm_abi_strings[] =
144 {
145 "auto",
146 "APCS",
147 "AAPCS",
148 NULL
149 };
150
151 /* A variable that can be configured by the user. */
152 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
153 static const char *arm_abi_string = "auto";
154
155 /* The execution mode to assume. */
156 static const char *const arm_mode_strings[] =
157 {
158 "auto",
159 "arm",
160 "thumb",
161 NULL
162 };
163
164 static const char *arm_fallback_mode_string = "auto";
165 static const char *arm_force_mode_string = "auto";
166
167 /* The standard register names, and all the valid aliases for them. Note
168 that `fp', `sp' and `pc' are not added in this alias list, because they
169 have been added as builtin user registers in
170 std-regs.c:_initialize_frame_reg. */
171 static const struct
172 {
173 const char *name;
174 int regnum;
175 } arm_register_aliases[] = {
176 /* Basic register numbers. */
177 { "r0", 0 },
178 { "r1", 1 },
179 { "r2", 2 },
180 { "r3", 3 },
181 { "r4", 4 },
182 { "r5", 5 },
183 { "r6", 6 },
184 { "r7", 7 },
185 { "r8", 8 },
186 { "r9", 9 },
187 { "r10", 10 },
188 { "r11", 11 },
189 { "r12", 12 },
190 { "r13", 13 },
191 { "r14", 14 },
192 { "r15", 15 },
193 /* Synonyms (argument and variable registers). */
194 { "a1", 0 },
195 { "a2", 1 },
196 { "a3", 2 },
197 { "a4", 3 },
198 { "v1", 4 },
199 { "v2", 5 },
200 { "v3", 6 },
201 { "v4", 7 },
202 { "v5", 8 },
203 { "v6", 9 },
204 { "v7", 10 },
205 { "v8", 11 },
206 /* Other platform-specific names for r9. */
207 { "sb", 9 },
208 { "tr", 9 },
209 /* Special names. */
210 { "ip", 12 },
211 { "lr", 14 },
212 /* Names used by GCC (not listed in the ARM EABI). */
213 { "sl", 10 },
214 /* A special name from the older ATPCS. */
215 { "wr", 7 },
216 };
217
218 static const char *const arm_register_names[] =
219 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
220 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
221 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
222 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
223 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
224 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
225 "fps", "cpsr" }; /* 24 25 */
226
227 /* Holds the current set of options to be passed to the disassembler. */
228 static char *arm_disassembler_options;
229
230 /* Valid register name styles. */
231 static const char **valid_disassembly_styles;
232
233 /* Disassembly style to use. Default to "std" register names. */
234 static const char *disassembly_style;
235
236 /* All possible arm target descriptors. */
237 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
238 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
239
240 /* This is used to keep the bfd arch_info in sync with the disassembly
241 style. */
242 static void set_disassembly_style_sfunc (const char *, int,
243 struct cmd_list_element *);
244 static void show_disassembly_style_sfunc (struct ui_file *, int,
245 struct cmd_list_element *,
246 const char *);
247
248 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
249 readable_regcache *regcache,
250 int regnum, gdb_byte *buf);
251 static void arm_neon_quad_write (struct gdbarch *gdbarch,
252 struct regcache *regcache,
253 int regnum, const gdb_byte *buf);
254
255 static CORE_ADDR
256 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
257
258
259 /* get_next_pcs operations. */
260 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
261 arm_get_next_pcs_read_memory_unsigned_integer,
262 arm_get_next_pcs_syscall_next_pc,
263 arm_get_next_pcs_addr_bits_remove,
264 arm_get_next_pcs_is_thumb,
265 NULL,
266 };
267
/* Cached results of prologue analysis for one frame; filled in by
   arm_analyze_prologue / thumb_analyze_prologue (see their CACHE
   parameter).  */

struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};
287
288 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
289 CORE_ADDR prologue_start,
290 CORE_ADDR prologue_end,
291 struct arm_prologue_cache *cache);
292
293 /* Architecture version for displaced stepping. This effects the behaviour of
294 certain instructions, and really should not be hard-wired. */
295
296 #define DISPLACED_STEPPING_ARCH_VERSION 5
297
298 /* Set to true if the 32-bit mode is in use. */
299
300 int arm_apcs_32 = 1;
301
302 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
303
304 int
305 arm_psr_thumb_bit (struct gdbarch *gdbarch)
306 {
307 if (gdbarch_tdep (gdbarch)->is_m)
308 return XPSR_T;
309 else
310 return CPSR_T;
311 }
312
313 /* Determine if the processor is currently executing in Thumb mode. */
314
315 int
316 arm_is_thumb (struct regcache *regcache)
317 {
318 ULONGEST cpsr;
319 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
320
321 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
322
323 return (cpsr & t_bit) != 0;
324 }
325
326 /* Determine if FRAME is executing in Thumb mode. */
327
328 int
329 arm_frame_is_thumb (struct frame_info *frame)
330 {
331 CORE_ADDR cpsr;
332 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
333
334 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
335 directly (from a signal frame or dummy frame) or by interpreting
336 the saved LR (from a prologue or DWARF frame). So consult it and
337 trust the unwinders. */
338 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
339
340 return (cpsr & t_bit) != 0;
341 }
342
/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.

   Mapping-symbol values are stored relative to their section, so
   MEMADDR is converted to a section offset for the search and the
   result is converted back to an absolute address for *START.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      arm_per_objfile *data = arm_objfile_data_key.get (sec->objfile);
      if (data != NULL)
	{
	  unsigned int section_idx = sec->the_bfd_section->index;
	  arm_mapping_symbol_vec &map
	    = data->section_maps[section_idx];

	  /* Sort the vector on first use.  Sorting lazily avoids paying
	     the cost for sections that are never looked up.  */
	  if (!data->section_maps_sorted[section_idx])
	    {
	      std::sort (map.begin (), map.end ());
	      data->section_maps_sorted[section_idx] = true;
	    }

	  /* Search key: MEMADDR as a section-relative offset; the type
	     field does not participate in the ordering.  */
	  struct arm_mapping_symbol map_key
	    = { memaddr - obj_section_addr (sec), 0 };
	  arm_mapping_symbol_vec::const_iterator it
	    = std::lower_bound (map.begin (), map.end (), map_key);

	  /* std::lower_bound finds the earliest ordered insertion
	     point.  If the symbol at this position starts at this exact
	     address, we use that; otherwise, the preceding
	     mapping symbol covers this address.  */
	  if (it < map.end ())
	    {
	      if (it->value == map_key.value)
		{
		  if (start)
		    *start = it->value + obj_section_addr (sec);
		  return it->type;
		}
	    }

	  /* Fall back to the symbol immediately before the insertion
	     point, if any; it is the last one starting at or below
	     MEMADDR.  */
	  if (it > map.begin ())
	    {
	      arm_mapping_symbol_vec::const_iterator prev_it
		= it - 1;

	      if (start)
		*start = prev_it->value + obj_section_addr (sec);
	      return prev_it->type;
	    }
	}
    }

  /* No section, no per-objfile data, or MEMADDR precedes every mapping
     symbol in its section.  */
  return 0;
}
403
/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.

   The checks below run from most to least authoritative: the Thumb
   address bit, the user's forced mode, M-profile (always Thumb),
   mapping symbols, the minimal-symbol "special" flag, the user's
   fallback mode, and finally the live CPSR.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  arm_displaced_step_closure *dsc
    = ((arm_displaced_step_closure * )
       get_displaced_step_closure_by_addr (memaddr));

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}
470
471 /* Determine if the address specified equals any of these magic return
472 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
473 architectures.
474
475 From ARMv6-M Reference Manual B1.5.8
476 Table B1-5 Exception return behavior
477
478 EXC_RETURN Return To Return Stack
479 0xFFFFFFF1 Handler mode Main
480 0xFFFFFFF9 Thread mode Main
481 0xFFFFFFFD Thread mode Process
482
483 From ARMv7-M Reference Manual B1.5.8
484 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
485
486 EXC_RETURN Return To Return Stack
487 0xFFFFFFF1 Handler mode Main
488 0xFFFFFFF9 Thread mode Main
489 0xFFFFFFFD Thread mode Process
490
491 Table B1-9 EXC_RETURN definition of exception return behavior, with
492 FP
493
494 EXC_RETURN Return To Return Stack Frame Type
495 0xFFFFFFE1 Handler mode Main Extended
496 0xFFFFFFE9 Thread mode Main Extended
497 0xFFFFFFED Thread mode Process Extended
498 0xFFFFFFF1 Handler mode Main Basic
499 0xFFFFFFF9 Thread mode Main Basic
500 0xFFFFFFFD Thread mode Process Basic
501
502 For more details see "B1.5.8 Exception return behavior"
503 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
504
505 static int
506 arm_m_addr_is_magic (CORE_ADDR addr)
507 {
508 switch (addr)
509 {
510 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
511 the exception return behavior. */
512 case 0xffffffe1:
513 case 0xffffffe9:
514 case 0xffffffed:
515 case 0xfffffff1:
516 case 0xfffffff9:
517 case 0xfffffffd:
518 /* Address is magic. */
519 return 1;
520
521 default:
522 /* Address is not magic. */
523 return 0;
524 }
525 }
526
527 /* Remove useless bits from addresses in a running program. */
528 static CORE_ADDR
529 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
530 {
531 /* On M-profile devices, do not strip the low bit from EXC_RETURN
532 (the magic exception return address). */
533 if (gdbarch_tdep (gdbarch)->is_m
534 && arm_m_addr_is_magic (val))
535 return val;
536
537 if (arm_apcs_32)
538 return UNMAKE_THUMB_ADDR (val);
539 else
540 return (val & 0x03fffffc);
541 }
542
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */

static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  Skip the stub's leading "__" so the
	 startswith tests below see the wrapped symbol's own name
	 (e.g. "____truncdfsf2_from_thumb" becomes
	 "__truncdfsf2_from_thumb").  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_code_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  /* Not a recognized helper; the caller must not skip it.  */
  return 0;
}
596
597 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
598 the first 16-bit of instruction, and INSN2 is the second 16-bit of
599 instruction. */
600 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
601 ((bits ((insn1), 0, 3) << 12) \
602 | (bits ((insn1), 10, 10) << 11) \
603 | (bits ((insn2), 12, 14) << 8) \
604 | bits ((insn2), 0, 7))
605
606 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
607 the 32-bit instruction. */
608 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
609 ((bits ((insn), 16, 19) << 12) \
610 | bits ((insn), 0, 11))
611
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.
   IMM is the 12-bit encoded immediate; the result is the expanded
   32-bit value.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  /* COUNT >= 8 selects the rotated-constant form: an 8-bit value with
     an implicit leading one plus IMM's low seven bits, rotated right
     by COUNT.  */
  if (count >= 8)
    return (0x80 | (imm & 0x7f)) << (32 - count);

  /* Otherwise, bits 9:8 of IMM choose how the low byte is replicated
     across the 32-bit result.  */
  unsigned int byte = imm & 0xff;

  switch (count / 2)
    {
    case 0:			/* 0x000000XY  */
      return byte;
    case 1:			/* 0x00XY00XY  */
      return byte | (byte << 16);
    case 2:			/* 0xXY00XY00  */
      return (byte << 8) | (byte << 24);
    default:			/* 0xXYXYXYXY  */
      return byte | (byte << 8) | (byte << 16) | (byte << 24);
    }
}
635
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  if (insn == 0x46bd)			/* mov sp, r7 */
    return 1;
  if ((insn & 0xff80) == 0xb000)	/* add sp, imm */
    return 1;
  if ((insn & 0xfe00) == 0xbc00)	/* pop <registers> */
    return 1;

  return 0;
}
646
647 /* Analyze a Thumb prologue, looking for a recognizable stack frame
648 and frame pointer. Scan until we encounter a store that could
649 clobber the stack frame unexpectedly, or an unknown instruction.
650 Return the last address which is definitely safe to skip for an
651 initial breakpoint. */
652
653 static CORE_ADDR
654 thumb_analyze_prologue (struct gdbarch *gdbarch,
655 CORE_ADDR start, CORE_ADDR limit,
656 struct arm_prologue_cache *cache)
657 {
658 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
659 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
660 int i;
661 pv_t regs[16];
662 CORE_ADDR offset;
663 CORE_ADDR unrecognized_pc = 0;
664
665 for (i = 0; i < 16; i++)
666 regs[i] = pv_register (i, 0);
667 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
668
669 while (start < limit)
670 {
671 unsigned short insn;
672
673 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
674
675 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
676 {
677 int regno;
678 int mask;
679
680 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
681 break;
682
683 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
684 whether to save LR (R14). */
685 mask = (insn & 0xff) | ((insn & 0x100) << 6);
686
687 /* Calculate offsets of saved R0-R7 and LR. */
688 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
689 if (mask & (1 << regno))
690 {
691 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
692 -4);
693 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
694 }
695 }
696 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
697 {
698 offset = (insn & 0x7f) << 2; /* get scaled offset */
699 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
700 -offset);
701 }
702 else if (thumb_instruction_restores_sp (insn))
703 {
704 /* Don't scan past the epilogue. */
705 break;
706 }
707 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
708 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
709 (insn & 0xff) << 2);
710 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
711 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
712 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
713 bits (insn, 6, 8));
714 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
715 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
716 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
717 bits (insn, 0, 7));
718 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
719 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
720 && pv_is_constant (regs[bits (insn, 3, 5)]))
721 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
722 regs[bits (insn, 6, 8)]);
723 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
724 && pv_is_constant (regs[bits (insn, 3, 6)]))
725 {
726 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
727 int rm = bits (insn, 3, 6);
728 regs[rd] = pv_add (regs[rd], regs[rm]);
729 }
730 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
731 {
732 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
733 int src_reg = (insn & 0x78) >> 3;
734 regs[dst_reg] = regs[src_reg];
735 }
736 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
737 {
738 /* Handle stores to the stack. Normally pushes are used,
739 but with GCC -mtpcs-frame, there may be other stores
740 in the prologue to create the frame. */
741 int regno = (insn >> 8) & 0x7;
742 pv_t addr;
743
744 offset = (insn & 0xff) << 2;
745 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
746
747 if (stack.store_would_trash (addr))
748 break;
749
750 stack.store (addr, 4, regs[regno]);
751 }
752 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
753 {
754 int rd = bits (insn, 0, 2);
755 int rn = bits (insn, 3, 5);
756 pv_t addr;
757
758 offset = bits (insn, 6, 10) << 2;
759 addr = pv_add_constant (regs[rn], offset);
760
761 if (stack.store_would_trash (addr))
762 break;
763
764 stack.store (addr, 4, regs[rd]);
765 }
766 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
767 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
768 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
769 /* Ignore stores of argument registers to the stack. */
770 ;
771 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
772 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
773 /* Ignore block loads from the stack, potentially copying
774 parameters from memory. */
775 ;
776 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
777 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
778 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
779 /* Similarly ignore single loads from the stack. */
780 ;
781 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
782 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
783 /* Skip register copies, i.e. saves to another register
784 instead of the stack. */
785 ;
786 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
787 /* Recognize constant loads; even with small stacks these are necessary
788 on Thumb. */
789 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
790 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
791 {
792 /* Constant pool loads, for the same reason. */
793 unsigned int constant;
794 CORE_ADDR loc;
795
796 loc = start + 4 + bits (insn, 0, 7) * 4;
797 constant = read_memory_unsigned_integer (loc, 4, byte_order);
798 regs[bits (insn, 8, 10)] = pv_constant (constant);
799 }
800 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
801 {
802 unsigned short inst2;
803
804 inst2 = read_code_unsigned_integer (start + 2, 2,
805 byte_order_for_code);
806
807 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
808 {
809 /* BL, BLX. Allow some special function calls when
810 skipping the prologue; GCC generates these before
811 storing arguments to the stack. */
812 CORE_ADDR nextpc;
813 int j1, j2, imm1, imm2;
814
815 imm1 = sbits (insn, 0, 10);
816 imm2 = bits (inst2, 0, 10);
817 j1 = bit (inst2, 13);
818 j2 = bit (inst2, 11);
819
820 offset = ((imm1 << 12) + (imm2 << 1));
821 offset ^= ((!j2) << 22) | ((!j1) << 23);
822
823 nextpc = start + 4 + offset;
824 /* For BLX make sure to clear the low bits. */
825 if (bit (inst2, 12) == 0)
826 nextpc = nextpc & 0xfffffffc;
827
828 if (!skip_prologue_function (gdbarch, nextpc,
829 bit (inst2, 12) != 0))
830 break;
831 }
832
833 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
834 { registers } */
835 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
836 {
837 pv_t addr = regs[bits (insn, 0, 3)];
838 int regno;
839
840 if (stack.store_would_trash (addr))
841 break;
842
843 /* Calculate offsets of saved registers. */
844 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
845 if (inst2 & (1 << regno))
846 {
847 addr = pv_add_constant (addr, -4);
848 stack.store (addr, 4, regs[regno]);
849 }
850
851 if (insn & 0x0020)
852 regs[bits (insn, 0, 3)] = addr;
853 }
854
855 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
856 [Rn, #+/-imm]{!} */
857 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
858 {
859 int regno1 = bits (inst2, 12, 15);
860 int regno2 = bits (inst2, 8, 11);
861 pv_t addr = regs[bits (insn, 0, 3)];
862
863 offset = inst2 & 0xff;
864 if (insn & 0x0080)
865 addr = pv_add_constant (addr, offset);
866 else
867 addr = pv_add_constant (addr, -offset);
868
869 if (stack.store_would_trash (addr))
870 break;
871
872 stack.store (addr, 4, regs[regno1]);
873 stack.store (pv_add_constant (addr, 4),
874 4, regs[regno2]);
875
876 if (insn & 0x0020)
877 regs[bits (insn, 0, 3)] = addr;
878 }
879
880 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
881 && (inst2 & 0x0c00) == 0x0c00
882 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
883 {
884 int regno = bits (inst2, 12, 15);
885 pv_t addr = regs[bits (insn, 0, 3)];
886
887 offset = inst2 & 0xff;
888 if (inst2 & 0x0200)
889 addr = pv_add_constant (addr, offset);
890 else
891 addr = pv_add_constant (addr, -offset);
892
893 if (stack.store_would_trash (addr))
894 break;
895
896 stack.store (addr, 4, regs[regno]);
897
898 if (inst2 & 0x0100)
899 regs[bits (insn, 0, 3)] = addr;
900 }
901
902 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
903 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
904 {
905 int regno = bits (inst2, 12, 15);
906 pv_t addr;
907
908 offset = inst2 & 0xfff;
909 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
910
911 if (stack.store_would_trash (addr))
912 break;
913
914 stack.store (addr, 4, regs[regno]);
915 }
916
917 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
918 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
919 /* Ignore stores of argument registers to the stack. */
920 ;
921
922 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
923 && (inst2 & 0x0d00) == 0x0c00
924 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
925 /* Ignore stores of argument registers to the stack. */
926 ;
927
928 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
929 { registers } */
930 && (inst2 & 0x8000) == 0x0000
931 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
932 /* Ignore block loads from the stack, potentially copying
933 parameters from memory. */
934 ;
935
936 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
937 [Rn, #+/-imm] */
938 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
939 /* Similarly ignore dual loads from the stack. */
940 ;
941
942 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
943 && (inst2 & 0x0d00) == 0x0c00
944 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
945 /* Similarly ignore single loads from the stack. */
946 ;
947
948 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
949 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
950 /* Similarly ignore single loads from the stack. */
951 ;
952
953 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
954 && (inst2 & 0x8000) == 0x0000)
955 {
956 unsigned int imm = ((bits (insn, 10, 10) << 11)
957 | (bits (inst2, 12, 14) << 8)
958 | bits (inst2, 0, 7));
959
960 regs[bits (inst2, 8, 11)]
961 = pv_add_constant (regs[bits (insn, 0, 3)],
962 thumb_expand_immediate (imm));
963 }
964
965 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
966 && (inst2 & 0x8000) == 0x0000)
967 {
968 unsigned int imm = ((bits (insn, 10, 10) << 11)
969 | (bits (inst2, 12, 14) << 8)
970 | bits (inst2, 0, 7));
971
972 regs[bits (inst2, 8, 11)]
973 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
974 }
975
976 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
977 && (inst2 & 0x8000) == 0x0000)
978 {
979 unsigned int imm = ((bits (insn, 10, 10) << 11)
980 | (bits (inst2, 12, 14) << 8)
981 | bits (inst2, 0, 7));
982
983 regs[bits (inst2, 8, 11)]
984 = pv_add_constant (regs[bits (insn, 0, 3)],
985 - (CORE_ADDR) thumb_expand_immediate (imm));
986 }
987
988 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
989 && (inst2 & 0x8000) == 0x0000)
990 {
991 unsigned int imm = ((bits (insn, 10, 10) << 11)
992 | (bits (inst2, 12, 14) << 8)
993 | bits (inst2, 0, 7));
994
995 regs[bits (inst2, 8, 11)]
996 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
997 }
998
999 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1000 {
1001 unsigned int imm = ((bits (insn, 10, 10) << 11)
1002 | (bits (inst2, 12, 14) << 8)
1003 | bits (inst2, 0, 7));
1004
1005 regs[bits (inst2, 8, 11)]
1006 = pv_constant (thumb_expand_immediate (imm));
1007 }
1008
1009 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1010 {
1011 unsigned int imm
1012 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1013
1014 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1015 }
1016
1017 else if (insn == 0xea5f /* mov.w Rd,Rm */
1018 && (inst2 & 0xf0f0) == 0)
1019 {
1020 int dst_reg = (inst2 & 0x0f00) >> 8;
1021 int src_reg = inst2 & 0xf;
1022 regs[dst_reg] = regs[src_reg];
1023 }
1024
1025 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1026 {
1027 /* Constant pool loads. */
1028 unsigned int constant;
1029 CORE_ADDR loc;
1030
1031 offset = bits (inst2, 0, 11);
1032 if (insn & 0x0080)
1033 loc = start + 4 + offset;
1034 else
1035 loc = start + 4 - offset;
1036
1037 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1038 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1039 }
1040
1041 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1042 {
1043 /* Constant pool loads. */
1044 unsigned int constant;
1045 CORE_ADDR loc;
1046
1047 offset = bits (inst2, 0, 7) << 2;
1048 if (insn & 0x0080)
1049 loc = start + 4 + offset;
1050 else
1051 loc = start + 4 - offset;
1052
1053 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1054 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1055
1056 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1057 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1058 }
1059
1060 else if (thumb2_instruction_changes_pc (insn, inst2))
1061 {
1062 /* Don't scan past anything that might change control flow. */
1063 break;
1064 }
1065 else
1066 {
1067 /* The optimizer might shove anything into the prologue,
1068 so we just skip what we don't recognize. */
1069 unrecognized_pc = start;
1070 }
1071
1072 start += 2;
1073 }
1074 else if (thumb_instruction_changes_pc (insn))
1075 {
1076 /* Don't scan past anything that might change control flow. */
1077 break;
1078 }
1079 else
1080 {
1081 /* The optimizer might shove anything into the prologue,
1082 so we just skip what we don't recognize. */
1083 unrecognized_pc = start;
1084 }
1085
1086 start += 2;
1087 }
1088
1089 if (arm_debug)
1090 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1091 paddress (gdbarch, start));
1092
1093 if (unrecognized_pc == 0)
1094 unrecognized_pc = start;
1095
1096 if (cache == NULL)
1097 return unrecognized_pc;
1098
1099 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1100 {
1101 /* Frame pointer is fp. Frame size is constant. */
1102 cache->framereg = ARM_FP_REGNUM;
1103 cache->framesize = -regs[ARM_FP_REGNUM].k;
1104 }
1105 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1106 {
1107 /* Frame pointer is r7. Frame size is constant. */
1108 cache->framereg = THUMB_FP_REGNUM;
1109 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1110 }
1111 else
1112 {
1113 /* Try the stack pointer... this is a bit desperate. */
1114 cache->framereg = ARM_SP_REGNUM;
1115 cache->framesize = -regs[ARM_SP_REGNUM].k;
1116 }
1117
1118 for (i = 0; i < 16; i++)
1119 if (stack.find_reg (gdbarch, i, &offset))
1120 cache->saved_regs[i].addr = offset;
1121
1122 return unrecognized_pc;
1123 }
1124
1125
1126 /* Try to analyze the instructions starting from PC, which load symbol
1127 __stack_chk_guard. Return the address of instruction after loading this
1128 symbol, set the dest register number to *BASEREG, and set the size of
1129 instructions for loading symbol in OFFSET. Return 0 if instructions are
1130 not recognized. */
1131
static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  /* ADDRESS stays 0 unless one of the recognized load sequences is
     found; the caller treats 0 as "not recognized".  */
  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_code_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
	{
	  /* PC-relative literal load: the word-aligned PC plus the
	     scaled 8-bit offset addresses the literal pool slot, whose
	     contents are the loaded address.  */
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  insn1
	    = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      /* movw/movt pair: combine the low and high 16-bit
		 immediate halves into the full address.  */
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_code_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
	{
	  /* ARM-mode PC-relative load; the +8 accounts for the ARM
	     pipeline PC offset.  */
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
	    {
	      /* movw/movt pair, ARM encoding.  */
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}
1209
1210 /* Try to skip a sequence of instructions used for stack protector. If PC
1211 points to the first instruction of this sequence, return the address of
1212 first instruction after this sequence, otherwise, return original PC.
1213
1214 On arm, this sequence of instructions is composed of mainly three steps,
1215 Step 1: load symbol __stack_chk_guard,
1216 Step 2: load from address of __stack_chk_guard,
1217 Step 3: store it to somewhere else.
1218
1219 Usually, instructions on step 2 and step 3 are the same on various ARM
1220 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1221 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1222 instructions in step 1 vary from different ARM architectures. On ARMv7,
1223 they are,
1224
1225 movw Rn, #:lower16:__stack_chk_guard
1226 movt Rn, #:upper16:__stack_chk_guard
1227
1228 On ARMv5t, it is,
1229
1230 ldr Rn, .Label
1231 ....
 .Label:
1233 .word __stack_chk_guard
1234
1235 Since ldr/str is a very popular instruction, we can't use them as
1236 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1237 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
 stripped, as the 'fingerprint' of a stack protector code sequence.  */
1239
static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  On success ADDR is the
     loaded address, BASEREG the destination register and OFFSET the
     byte size of the load sequence.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      /* The load must be based on the register that received the
	 guard's address in Step 1.  */
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_code_unsigned_integer (pc + offset + 2, 2,
					 byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      /* The register stored must be the one loaded in Step 2.  */
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      /* The load must be based on the register from Step 1.  */
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_code_unsigned_integer (pc + offset + 4,
					 4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      /* The register stored must be the one loaded in Step 2.  */
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
1311
1312 /* Advance the PC across any function entry prologue instructions to
1313 reach some "real" code.
1314
1315 The APCS (ARM Procedure Call Standard) defines the following
1316 prologue:
1317
1318 mov ip, sp
1319 [stmfd sp!, {a1,a2,a3,a4}]
1320 stmfd sp!, {...,fp,ip,lr,pc}
1321 [stfe f7, [sp, #-12]!]
1322 [stfe f6, [sp, #-12]!]
1323 [stfe f5, [sp, #-12]!]
1324 [stfe f4, [sp, #-12]!]
1325 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1326
static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      /* Also skip a stack-protector sequence, if one immediately
	 follows the prologue (arm_skip_stack_protector returns its
	 argument unchanged when no sequence is recognized).  */
      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);


      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	  && (cust == NULL
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
	      || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  /* If the analyzer stopped before the line-table endpoint,
	     mistrust the line table and do not skip anything.  */
	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;          /* Magic.  */


  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
  else
    return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
}
1404
1405 /* *INDENT-OFF* */
1406 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1407 This function decodes a Thumb function prologue to determine:
1408 1) the size of the stack frame
1409 2) which registers are saved on it
1410 3) the offsets of saved regs
1411 4) the offset from the stack pointer to the frame pointer
1412
1413 A typical Thumb function prologue would create this stack frame
1414 (offsets relative to FP)
1415 old SP -> 24 stack parameters
1416 20 LR
1417 16 R7
1418 R7 -> 0 local variables (16 bytes)
1419 SP -> -12 additional stack space (12 bytes)
1420 The frame size would thus be 36 bytes, and the frame offset would be
1421 12 bytes. The frame register is R7.
1422
1423 The comments for thumb_skip_prolog() describe the algorithm we use
1424 to detect the end of the prolog. */
1425 /* *INDENT-ON* */
1426
1427 static void
1428 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1429 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1430 {
1431 CORE_ADDR prologue_start;
1432 CORE_ADDR prologue_end;
1433
1434 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1435 &prologue_end))
1436 {
1437 /* See comment in arm_scan_prologue for an explanation of
1438 this heuristics. */
1439 if (prologue_end > prologue_start + 64)
1440 {
1441 prologue_end = prologue_start + 64;
1442 }
1443 }
1444 else
1445 /* We're in the boondocks: we have no idea where the start of the
1446 function is. */
1447 return;
1448
1449 prologue_end = std::min (prologue_end, prev_pc);
1450
1451 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1452 }
1453
1454 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1455 otherwise. */
1456
static int
arm_instruction_restores_sp (unsigned int insn)
{
  /* Condition field 0xf (the NV / unconditional extension space) never
     encodes one of the forms below.  */
  if (((insn >> 28) & 0xf) == 0xf)
    return 0;

  if ((insn & 0x0df0f000) == 0x0080d000)	/* ADD SP (reg or imm).  */
    return 1;
  if ((insn & 0x0df0f000) == 0x0040d000)	/* SUB SP (reg or imm).  */
    return 1;
  if ((insn & 0x0ffffff0) == 0x01a0d000)	/* MOV SP, Rm.  */
    return 1;
  if ((insn & 0x0fff0000) == 0x08bd0000)	/* POP (LDMIA sp!).  */
    return 1;
  if ((insn & 0x0fff0000) == 0x049d0000)	/* POP of a single register.  */
    return 1;

  return 0;
}
1477
1478 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1479 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1480 fill it in. Return the first address not recognized as a prologue
1481 instruction.
1482
1483 We recognize all the instructions typically found in ARM prologues,
1484 plus harmless instructions which can be skipped (either for analysis
1485 purposes, or a more restrictive set that can be skipped when finding
1486 the end of the prologue). */
1487
1488 static CORE_ADDR
1489 arm_analyze_prologue (struct gdbarch *gdbarch,
1490 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1491 struct arm_prologue_cache *cache)
1492 {
1493 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1494 int regno;
1495 CORE_ADDR offset, current_pc;
1496 pv_t regs[ARM_FPS_REGNUM];
1497 CORE_ADDR unrecognized_pc = 0;
1498
1499 /* Search the prologue looking for instructions that set up the
1500 frame pointer, adjust the stack pointer, and save registers.
1501
1502 Be careful, however, and if it doesn't look like a prologue,
1503 don't try to scan it. If, for instance, a frameless function
1504 begins with stmfd sp!, then we will tell ourselves there is
1505 a frame, which will confuse stack traceback, as well as "finish"
1506 and other operations that rely on a knowledge of the stack
1507 traceback. */
1508
1509 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1510 regs[regno] = pv_register (regno, 0);
1511 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1512
1513 for (current_pc = prologue_start;
1514 current_pc < prologue_end;
1515 current_pc += 4)
1516 {
1517 unsigned int insn
1518 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1519
1520 if (insn == 0xe1a0c00d) /* mov ip, sp */
1521 {
1522 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1523 continue;
1524 }
1525 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1526 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1527 {
1528 unsigned imm = insn & 0xff; /* immediate value */
1529 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1530 int rd = bits (insn, 12, 15);
1531 imm = (imm >> rot) | (imm << (32 - rot));
1532 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1533 continue;
1534 }
1535 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1536 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1537 {
1538 unsigned imm = insn & 0xff; /* immediate value */
1539 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1540 int rd = bits (insn, 12, 15);
1541 imm = (imm >> rot) | (imm << (32 - rot));
1542 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1543 continue;
1544 }
1545 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1546 [sp, #-4]! */
1547 {
1548 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1549 break;
1550 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1551 stack.store (regs[ARM_SP_REGNUM], 4,
1552 regs[bits (insn, 12, 15)]);
1553 continue;
1554 }
1555 else if ((insn & 0xffff0000) == 0xe92d0000)
1556 /* stmfd sp!, {..., fp, ip, lr, pc}
1557 or
1558 stmfd sp!, {a1, a2, a3, a4} */
1559 {
1560 int mask = insn & 0xffff;
1561
1562 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1563 break;
1564
1565 /* Calculate offsets of saved registers. */
1566 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1567 if (mask & (1 << regno))
1568 {
1569 regs[ARM_SP_REGNUM]
1570 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1571 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1572 }
1573 }
1574 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1575 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1576 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1577 {
1578 /* No need to add this to saved_regs -- it's just an arg reg. */
1579 continue;
1580 }
1581 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1582 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1583 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1584 {
1585 /* No need to add this to saved_regs -- it's just an arg reg. */
1586 continue;
1587 }
1588 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1589 { registers } */
1590 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1591 {
1592 /* No need to add this to saved_regs -- it's just arg regs. */
1593 continue;
1594 }
1595 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1596 {
1597 unsigned imm = insn & 0xff; /* immediate value */
1598 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1599 imm = (imm >> rot) | (imm << (32 - rot));
1600 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1601 }
1602 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1603 {
1604 unsigned imm = insn & 0xff; /* immediate value */
1605 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1606 imm = (imm >> rot) | (imm << (32 - rot));
1607 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1608 }
1609 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1610 [sp, -#c]! */
1611 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1612 {
1613 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1614 break;
1615
1616 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1617 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1618 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1619 }
1620 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1621 [sp!] */
1622 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1623 {
1624 int n_saved_fp_regs;
1625 unsigned int fp_start_reg, fp_bound_reg;
1626
1627 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1628 break;
1629
1630 if ((insn & 0x800) == 0x800) /* N0 is set */
1631 {
1632 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1633 n_saved_fp_regs = 3;
1634 else
1635 n_saved_fp_regs = 1;
1636 }
1637 else
1638 {
1639 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1640 n_saved_fp_regs = 2;
1641 else
1642 n_saved_fp_regs = 4;
1643 }
1644
1645 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1646 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1647 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1648 {
1649 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1650 stack.store (regs[ARM_SP_REGNUM], 12,
1651 regs[fp_start_reg++]);
1652 }
1653 }
1654 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1655 {
1656 /* Allow some special function calls when skipping the
1657 prologue; GCC generates these before storing arguments to
1658 the stack. */
1659 CORE_ADDR dest = BranchDest (current_pc, insn);
1660
1661 if (skip_prologue_function (gdbarch, dest, 0))
1662 continue;
1663 else
1664 break;
1665 }
1666 else if ((insn & 0xf0000000) != 0xe0000000)
1667 break; /* Condition not true, exit early. */
1668 else if (arm_instruction_changes_pc (insn))
1669 /* Don't scan past anything that might change control flow. */
1670 break;
1671 else if (arm_instruction_restores_sp (insn))
1672 {
1673 /* Don't scan past the epilogue. */
1674 break;
1675 }
1676 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1677 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1678 /* Ignore block loads from the stack, potentially copying
1679 parameters from memory. */
1680 continue;
1681 else if ((insn & 0xfc500000) == 0xe4100000
1682 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1683 /* Similarly ignore single loads from the stack. */
1684 continue;
1685 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1686 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1687 register instead of the stack. */
1688 continue;
1689 else
1690 {
1691 /* The optimizer might shove anything into the prologue, if
1692 we build up cache (cache != NULL) from scanning prologue,
1693 we just skip what we don't recognize and scan further to
1694 make cache as complete as possible. However, if we skip
1695 prologue, we'll stop immediately on unrecognized
1696 instruction. */
1697 unrecognized_pc = current_pc;
1698 if (cache != NULL)
1699 continue;
1700 else
1701 break;
1702 }
1703 }
1704
1705 if (unrecognized_pc == 0)
1706 unrecognized_pc = current_pc;
1707
1708 if (cache)
1709 {
1710 int framereg, framesize;
1711
1712 /* The frame size is just the distance from the frame register
1713 to the original stack pointer. */
1714 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1715 {
1716 /* Frame pointer is fp. */
1717 framereg = ARM_FP_REGNUM;
1718 framesize = -regs[ARM_FP_REGNUM].k;
1719 }
1720 else
1721 {
1722 /* Try the stack pointer... this is a bit desperate. */
1723 framereg = ARM_SP_REGNUM;
1724 framesize = -regs[ARM_SP_REGNUM].k;
1725 }
1726
1727 cache->framereg = framereg;
1728 cache->framesize = framesize;
1729
1730 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1731 if (stack.find_reg (gdbarch, regno, &offset))
1732 cache->saved_regs[regno].addr = offset;
1733 }
1734
1735 if (arm_debug)
1736 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1737 paddress (gdbarch, unrecognized_pc));
1738
1739 return unrecognized_pc;
1740 }
1741
/* Scan the prologue of the function containing THIS_FRAME's PC and
   record the frame register, frame size and saved-register offsets
   in CACHE.  Falls back to heuristics when symbol information is
   missing.  */

static void
arm_scan_prologue (struct frame_info *this_frame,
		   struct arm_prologue_cache *cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  CORE_ADDR prologue_start, prologue_end;
  CORE_ADDR prev_pc = get_frame_pc (this_frame);
  CORE_ADDR block_addr = get_frame_address_in_block (this_frame);

  /* Assume there is no frame until proven otherwise.  */
  cache->framereg = ARM_SP_REGNUM;
  cache->framesize = 0;

  /* Check for Thumb prologue.  */
  if (arm_frame_is_thumb (this_frame))
    {
      thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
      return;
    }

  /* Find the function prologue.  If we can't find the function in
     the symbol table, peek in the stack frame to find the PC.  */
  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* One way to find the end of the prologue (which works well
	 for unoptimized code) is to do the following:

	    struct symtab_and_line sal = find_pc_line (prologue_start, 0);

	    if (sal.line == 0)
	      prologue_end = prev_pc;
	    else if (sal.end < prologue_end)
	      prologue_end = sal.end;

	 This mechanism is very accurate so long as the optimizer
	 doesn't move any instructions from the function body into the
	 prologue.  If this happens, sal.end will be the last
	 instruction in the first hunk of prologue code just before
	 the first instruction that the scheduler has moved from
	 the body to the prologue.

	 In order to make sure that we scan all of the prologue
	 instructions, we use a slightly less accurate mechanism which
	 may scan more than necessary.  To help compensate for this
	 lack of accuracy, the prologue scanning loop below contains
	 several clauses which'll cause the loop to terminate early if
	 an implausible prologue instruction is encountered.

	 The expression

	      prologue_start + 64

	 is a suitable endpoint since it accounts for the largest
	 possible prologue plus up to five instructions inserted by
	 the scheduler.  */

      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }
  else
    {
      /* We have no symbol information.  Our only option is to assume this
	 function has a standard stack frame and the normal frame register.
	 Then, we can find the value of our frame pointer on entrance to
	 the callee (or at the present moment if this is the innermost frame).
	 The value stored there should be the address of the stmfd + 8.  */
      CORE_ADDR frame_loc;
      ULONGEST return_value;

      /* AAPCS does not use a frame register, so we can abort here.  */
      if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
	return;

      frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
      if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
					      &return_value))
	return;
      else
	{
	  prologue_start = gdbarch_addr_bits_remove
			     (gdbarch, return_value) - 8;
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }

  /* Never scan past the point execution has actually reached.  */
  if (prev_pc < prologue_end)
    prologue_end = prev_pc;

  arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}
1836
1837 static struct arm_prologue_cache *
1838 arm_make_prologue_cache (struct frame_info *this_frame)
1839 {
1840 int reg;
1841 struct arm_prologue_cache *cache;
1842 CORE_ADDR unwound_fp;
1843
1844 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1845 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1846
1847 arm_scan_prologue (this_frame, cache);
1848
1849 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1850 if (unwound_fp == 0)
1851 return cache;
1852
1853 cache->prev_sp = unwound_fp + cache->framesize;
1854
1855 /* Calculate actual addresses of saved registers using offsets
1856 determined by arm_scan_prologue. */
1857 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1858 if (trad_frame_addr_p (cache->saved_regs, reg))
1859 cache->saved_regs[reg].addr += cache->prev_sp;
1860
1861 return cache;
1862 }
1863
1864 /* Implementation of the stop_reason hook for arm_prologue frames. */
1865
1866 static enum unwind_stop_reason
1867 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1868 void **this_cache)
1869 {
1870 struct arm_prologue_cache *cache;
1871 CORE_ADDR pc;
1872
1873 if (*this_cache == NULL)
1874 *this_cache = arm_make_prologue_cache (this_frame);
1875 cache = (struct arm_prologue_cache *) *this_cache;
1876
1877 /* This is meant to halt the backtrace at "_start". */
1878 pc = get_frame_pc (this_frame);
1879 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1880 return UNWIND_OUTERMOST;
1881
1882 /* If we've hit a wall, stop. */
1883 if (cache->prev_sp == 0)
1884 return UNWIND_OUTERMOST;
1885
1886 return UNWIND_NO_REASON;
1887 }
1888
1889 /* Our frame ID for a normal frame is the current function's starting PC
1890 and the caller's SP when we were called. */
1891
1892 static void
1893 arm_prologue_this_id (struct frame_info *this_frame,
1894 void **this_cache,
1895 struct frame_id *this_id)
1896 {
1897 struct arm_prologue_cache *cache;
1898 struct frame_id id;
1899 CORE_ADDR pc, func;
1900
1901 if (*this_cache == NULL)
1902 *this_cache = arm_make_prologue_cache (this_frame);
1903 cache = (struct arm_prologue_cache *) *this_cache;
1904
1905 /* Use function start address as part of the frame ID. If we cannot
1906 identify the start address (due to missing symbol information),
1907 fall back to just using the current PC. */
1908 pc = get_frame_pc (this_frame);
1909 func = get_frame_func (this_frame);
1910 if (!func)
1911 func = pc;
1912
1913 id = frame_id_build (cache->prev_sp, func);
1914 *this_id = id;
1915 }
1916
/* Implementation of the prev_register hook for the ARM prologue
   unwinder: return the value register PREV_REGNUM had in the frame
   that called THIS_FRAME.  */

static struct value *
arm_prologue_prev_register (struct frame_info *this_frame,
			    void **this_cache,
			    int prev_regnum)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* If we are asked to unwind the PC, then we need to return the LR
     instead.  The prologue may save PC, but it will point into this
     frame's prologue, not the next frame's resume location.  Also
     strip the saved T bit.  A valid LR may have the low bit set, but
     a valid PC never does.  */
  if (prev_regnum == ARM_PC_REGNUM)
    {
      CORE_ADDR lr;

      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, prev_regnum,
					arm_addr_bits_remove (gdbarch, lr));
    }

  /* SP is generally not saved to the stack, but this frame is
     identified by the next frame's stack pointer at the time of the call.
     The value was already reconstructed into PREV_SP.  */
  if (prev_regnum == ARM_SP_REGNUM)
    return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);

  /* The CPSR may have been changed by the call instruction and by the
     called function.  The only bit we can reconstruct is the T bit,
     by checking the low bit of LR as of the call.  This is a reliable
     indicator of Thumb-ness except for some ARM v4T pre-interworking
     Thumb code, which could get away with a clear low bit as long as
     the called function did not use bx.  Guess that all other
     bits are unchanged; the condition flags are presumably lost,
     but the processor status is likely valid.  */
  if (prev_regnum == ARM_PS_REGNUM)
    {
      CORE_ADDR lr, cpsr;
      ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

      cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
	cpsr |= t_bit;
      else
	cpsr &= ~t_bit;
      return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
    }

  /* Everything else: use the saved-register addresses computed by the
     prologue scan.  */
  return trad_frame_get_prev_register (this_frame, cache->saved_regs,
				       prev_regnum);
}
1974
/* The ARM prologue-analysis frame unwinder.  */

struct frame_unwind arm_prologue_unwind = {
  NORMAL_FRAME,
  arm_prologue_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  default_frame_sniffer
};
1983
/* Maintain a list of ARM exception table entries per objfile, similar to the
   list of mapping symbols.  We only cache entries for standard ARM-defined
   personality routines; the cache will contain only the frame unwinding
   instructions associated with the entry (not the descriptors).  */

struct arm_exidx_entry
{
  /* Section-relative start address of the region covered by this
     entry (see arm_exidx_new_objfile, which subtracts the section
     VMA before storing).  */
  bfd_vma addr;

  /* Normalized unwind instruction bytes, allocated on the objfile
     obstack and terminated by the implicit 0xb0 "Finish" opcode;
     NULL for EXIDX_CANTUNWIND entries.  */
  gdb_byte *entry;

  /* Order by start address, so std::lower_bound can search the
     per-section vectors.  */
  bool operator< (const arm_exidx_entry &other) const
  {
    return addr < other.addr;
  }
};
1999
/* Per-objfile cache of exception table entries: one vector of entries
   per BFD section, indexed by the section's index.  */

struct arm_exidx_data
{
  std::vector<std::vector<arm_exidx_entry>> section_maps;
};

/* Registry key associating an arm_exidx_data with each objfile; also
   serves as the "already parsed" marker in arm_exidx_new_objfile.  */
static const struct objfile_key<arm_exidx_data> arm_exidx_data_key;
2006
2007 static struct obj_section *
2008 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2009 {
2010 struct obj_section *osect;
2011
2012 ALL_OBJFILE_OSECTIONS (objfile, osect)
2013 if (bfd_get_section_flags (objfile->obfd,
2014 osect->the_bfd_section) & SEC_ALLOC)
2015 {
2016 bfd_vma start, size;
2017 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2018 size = bfd_get_section_size (osect->the_bfd_section);
2019
2020 if (start <= vma && vma < start + size)
2021 return osect;
2022 }
2023
2024 return NULL;
2025 }
2026
/* Parse contents of exception table and exception index sections
   of OBJFILE, and fill in the exception table entry cache.

   For each entry that refers to a standard ARM-defined personality
   routine, extract the frame unwinding instructions (from either
   the index or the table section).  The unwinding instructions
   are normalized by:
   - extracting them from the rest of the table data
   - converting to host endianness
   - appending the implicit 0xb0 ("Finish") code

   The extracted and normalized instructions are stored for later
   retrieval by the arm_find_exidx_entry routine.  */

static void
arm_exidx_new_objfile (struct objfile *objfile)
{
  struct arm_exidx_data *data;
  asection *exidx, *extab;
  bfd_vma exidx_vma = 0, extab_vma = 0;
  LONGEST i;

  /* If we've already touched this file, do nothing.  */
  if (!objfile || arm_exidx_data_key.get (objfile) != NULL)
    return;

  /* Read contents of exception table and index.  */
  exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
  gdb::byte_vector exidx_data;
  if (exidx)
    {
      exidx_vma = bfd_section_vma (objfile->obfd, exidx);
      exidx_data.resize (bfd_get_section_size (exidx));

      if (!bfd_get_section_contents (objfile->obfd, exidx,
				     exidx_data.data (), 0,
				     exidx_data.size ()))
	return;
    }

  extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
  gdb::byte_vector extab_data;
  if (extab)
    {
      extab_vma = bfd_section_vma (objfile->obfd, extab);
      extab_data.resize (bfd_get_section_size (extab));

      if (!bfd_get_section_contents (objfile->obfd, extab,
				     extab_data.data (), 0,
				     extab_data.size ()))
	return;
    }

  /* Allocate exception table data structure: one entry vector per
     BFD section of the objfile.  */
  data = arm_exidx_data_key.emplace (objfile);
  data->section_maps.resize (objfile->obfd->section_count);

  /* Fill in exception table.  Each .ARM.exidx record is two 32-bit
     words: a prel31 function address and either an inline entry, a
     prel31 pointer into .ARM.extab, or EXIDX_CANTUNWIND (1).  */
  for (i = 0; i < exidx_data.size () / 8; i++)
    {
      struct arm_exidx_entry new_exidx_entry;
      bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
      bfd_vma val = bfd_h_get_32 (objfile->obfd,
				  exidx_data.data () + i * 8 + 4);
      bfd_vma addr = 0, word = 0;
      int n_bytes = 0, n_words = 0;
      struct obj_section *sec;
      gdb_byte *entry = NULL;

      /* Extract address of start of function.  The XOR/subtract pair
	 sign-extends the 31-bit place-relative (prel31) offset, which
	 is then relocated against the record's own address.  */
      idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
      idx += exidx_vma + i * 8;

      /* Find section containing function and compute section offset.  */
      sec = arm_obj_section_from_vma (objfile, idx);
      if (sec == NULL)
	continue;
      idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);

      /* Determine address of exception table entry.  */
      if (val == 1)
	{
	  /* EXIDX_CANTUNWIND -- no exception table entry present.
	     ENTRY stays NULL.  */
	}
      else if ((val & 0xff000000) == 0x80000000)
	{
	  /* Exception table entry embedded in .ARM.exidx
	     -- must be short form.  */
	  word = val;
	  n_bytes = 3;
	}
      else if (!(val & 0x80000000))
	{
	  /* Exception table entry in .ARM.extab.  VAL is another
	     prel31 offset, this time to the extab record.  */
	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
	  addr += exidx_vma + i * 8 + 4;

	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data.data () + addr - extab_vma);
	      addr += 4;

	      if ((word & 0xff000000) == 0x80000000)
		{
		  /* Short form.  */
		  n_bytes = 3;
		}
	      else if ((word & 0xff000000) == 0x81000000
		       || (word & 0xff000000) == 0x82000000)
		{
		  /* Long form.  */
		  n_bytes = 2;
		  n_words = ((word >> 16) & 0xff);
		}
	      else if (!(word & 0x80000000))
		{
		  bfd_vma pers;
		  struct obj_section *pers_sec;
		  int gnu_personality = 0;

		  /* Custom personality routine.  */
		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);

		  /* Check whether we've got one of the variants of the
		     GNU personality routines.  */
		  pers_sec = arm_obj_section_from_vma (objfile, pers);
		  if (pers_sec)
		    {
		      static const char *personality[] =
			{
			  "__gcc_personality_v0",
			  "__gxx_personality_v0",
			  "__gcj_personality_v0",
			  "__gnu_objc_personality_v0",
			  NULL
			};

		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
		      int k;

		      for (k = 0; personality[k]; k++)
			if (lookup_minimal_symbol_by_pc_name
			      (pc, personality[k], objfile))
			  {
			    gnu_personality = 1;
			    break;
			  }
		    }

		  /* If so, the next word contains a word count in the high
		     byte, followed by the same unwind instructions as the
		     pre-defined forms.  */
		  if (gnu_personality
		      && addr + 4 <= extab_vma + extab_data.size ())
		    {
		      word = bfd_h_get_32 (objfile->obfd,
					   (extab_data.data ()
					    + addr - extab_vma));
		      addr += 4;
		      n_bytes = 3;
		      n_words = ((word >> 24) & 0xff);
		    }
		}
	    }
	}

      /* Sanity check address.  */
      if (n_words)
	if (addr < extab_vma
	    || addr + 4 * n_words > extab_vma + extab_data.size ())
	  n_words = n_bytes = 0;

      /* The unwind instructions reside in WORD (only the N_BYTES least
	 significant bytes are valid), followed by N_WORDS words in the
	 extab section starting at ADDR.  */
      if (n_bytes || n_words)
	{
	  gdb_byte *p = entry
	    = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
					  n_bytes + n_words * 4 + 1);

	  while (n_bytes--)
	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);

	  while (n_words--)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data.data () + addr - extab_vma);
	      addr += 4;

	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
	      *p++ = (gdb_byte) (word & 0xff);
	    }

	  /* Implied "Finish" to terminate the list.  */
	  *p++ = 0xb0;
	}

      /* Push entry onto vector.  They are guaranteed to always
	 appear in order of increasing addresses.  */
      new_exidx_entry.addr = idx;
      new_exidx_entry.entry = entry;
      data->section_maps[sec->the_bfd_section->index].push_back
	(new_exidx_entry);
    }
}
2237
2238 /* Search for the exception table entry covering MEMADDR. If one is found,
2239 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2240 set *START to the start of the region covered by this entry. */
2241
2242 static gdb_byte *
2243 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2244 {
2245 struct obj_section *sec;
2246
2247 sec = find_pc_section (memaddr);
2248 if (sec != NULL)
2249 {
2250 struct arm_exidx_data *data;
2251 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2252
2253 data = arm_exidx_data_key.get (sec->objfile);
2254 if (data != NULL)
2255 {
2256 std::vector<arm_exidx_entry> &map
2257 = data->section_maps[sec->the_bfd_section->index];
2258 if (!map.empty ())
2259 {
2260 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2261
2262 /* std::lower_bound finds the earliest ordered insertion
2263 point. If the following symbol starts at this exact
2264 address, we use that; otherwise, the preceding
2265 exception table entry covers this address. */
2266 if (idx < map.end ())
2267 {
2268 if (idx->addr == map_key.addr)
2269 {
2270 if (start)
2271 *start = idx->addr + obj_section_addr (sec);
2272 return idx->entry;
2273 }
2274 }
2275
2276 if (idx > map.begin ())
2277 {
2278 idx = idx - 1;
2279 if (start)
2280 *start = idx->addr + obj_section_addr (sec);
2281 return idx->entry;
2282 }
2283 }
2284 }
2285 }
2286
2287 return NULL;
2288 }
2289
/* Given the current frame THIS_FRAME, and its associated frame unwinding
   instruction list from the ARM exception table entry ENTRY, allocate and
   return a prologue cache structure describing how to unwind this frame.

   Return NULL if the unwinding instruction list contains a "spare",
   "reserved" or "refuse to unwind" instruction as defined in section
   "9.3 Frame unwinding instructions" of the "Exception Handling ABI
   for the ARM Architecture" document.  */

static struct arm_prologue_cache *
arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
{
  /* VSP is the EHABI "virtual stack pointer": it starts at this
     frame's SP and is adjusted by each decoded instruction; popped
     registers are recorded at addresses derived from it.  */
  CORE_ADDR vsp = 0;
  int vsp_valid = 0;

  struct arm_prologue_cache *cache;
  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  for (;;)
    {
      gdb_byte insn;

      /* Whenever we reload SP, we actually have to retrieve its
	 actual value in the current frame.  */
      if (!vsp_valid)
	{
	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
	    {
	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
	      vsp = get_frame_register_unsigned (this_frame, reg);
	    }
	  else
	    {
	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
	    }

	  vsp_valid = 1;
	}

      /* Decode next unwind instruction.  */
      insn = *entry++;

      if ((insn & 0xc0) == 0)
	{
	  /* 00xxxxxx: vsp += (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp += (offset << 2) + 4;
	}
      else if ((insn & 0xc0) == 0x40)
	{
	  /* 01xxxxxx: vsp -= (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp -= (offset << 2) + 4;
	}
      else if ((insn & 0xf0) == 0x80)
	{
	  /* 1000iiii iiiiiiii: pop registers under a 12-bit mask.  */
	  int mask = ((insn & 0xf) << 8) | *entry++;
	  int i;

	  /* The special case of an all-zero mask identifies
	     "Refuse to unwind".  We return NULL to fall back
	     to the prologue analyzer.  */
	  if (mask == 0)
	    return NULL;

	  /* Pop registers r4..r15 under mask.  */
	  for (i = 0; i < 12; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[4 + i].addr = vsp;
		vsp += 4;
	      }

	  /* Special-case popping SP -- we need to reload vsp.  */
	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
	    vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0x90)
	{
	  /* 1001nnnn: set vsp = r[nnnn].  */
	  int reg = insn & 0xf;

	  /* Reserved cases.  */
	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
	    return NULL;

	  /* Set SP from another register and mark VSP for reload.  */
	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
	  vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0xa0)
	{
	  /* 10100nnn / 10101nnn: pop r4..r[4+nnn], optionally LR.  */
	  int count = insn & 0x7;
	  int pop_lr = (insn & 0x8) != 0;
	  int i;

	  /* Pop r4..r[4+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[4 + i].addr = vsp;
	      vsp += 4;
	    }

	  /* If indicated by flag, pop LR as well.  */
	  if (pop_lr)
	    {
	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
	      vsp += 4;
	    }
	}
      else if (insn == 0xb0)
	{
	  /* 10110000: "Finish" -- end of the instruction list.  */
	  /* We could only have updated PC by popping into it; if so, it
	     will show up as address.  Otherwise, copy LR into PC.  */
	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
	    cache->saved_regs[ARM_PC_REGNUM]
	      = cache->saved_regs[ARM_LR_REGNUM];

	  /* We're done.  */
	  break;
	}
      else if (insn == 0xb1)
	{
	  /* 10110001: pop r0..r3 under a 4-bit mask.  */
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop r0..r3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[i].addr = vsp;
		vsp += 4;
	      }
	}
      else if (insn == 0xb2)
	{
	  /* 10110010: vsp += 0x204 + (ULEB128 << 2).  */
	  ULONGEST offset = 0;
	  unsigned shift = 0;

	  do
	    {
	      offset |= (*entry & 0x7f) << shift;
	      shift += 7;
	    }
	  while (*entry++ & 0x80);

	  vsp += 0x204 + (offset << 2);
	}
      else if (insn == 0xb3)
	{
	  /* 10110011 sssscccc: pop VFP regs (FSTMFDX format).  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D15 are valid here.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if ((insn & 0xf8) == 0xb8)
	{
	  /* 10111nnn: pop D8..D[8+nnn] (FSTMFDX format).  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if (insn == 0xc6)
	{
	  /* 11000110 sssscccc: pop iWMMXt data registers.  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers WR0..WR15 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc7)
	{
	  /* 11000111: pop iWMMXt control registers under mask.  */
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
		vsp += 4;
	      }
	}
      else if ((insn & 0xf8) == 0xc0)
	{
	  /* 11000nnn (nnn != 6, 7): pop WR10..WR[10+nnn].  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop iwmmx registers WR[10]..WR[10+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc8)
	{
	  /* 11001000 sssscccc: pop high VFP regs (VPUSH format).  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D31 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers
	     D[16+start]..D[16+start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc9)
	{
	  /* 11001001 sssscccc: pop VFP regs (VPUSH format).  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if ((insn & 0xf8) == 0xd0)
	{
	  /* 11010nnn: pop D8..D[8+nnn] (VPUSH format).  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else
	{
	  /* Everything else is "spare".  */
	  return NULL;
	}
    }

  /* If we restore SP from a register, assume this was the frame register.
     Otherwise just fall back to SP as frame register.  */
  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
    cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
  else
    cache->framereg = ARM_SP_REGNUM;

  /* Determine offset to previous frame.  */
  cache->framesize
    = vsp - get_frame_register_unsigned (this_frame, cache->framereg);

  /* We already got the previous SP.  */
  cache->prev_sp = vsp;

  return cache;
}
2588
/* Unwinding via ARM exception table entries.  Note that the sniffer
   already computes a filled-in prologue cache, which is then used
   with the same arm_prologue_this_id and arm_prologue_prev_register
   routines also used for prologue-parsing based unwinding.  */

static int
arm_exidx_unwind_sniffer (const struct frame_unwind *self,
			  struct frame_info *this_frame,
			  void **this_prologue_cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR addr_in_block, exidx_region, func_start;
  struct arm_prologue_cache *cache;
  gdb_byte *entry;

  /* See if we have an ARM exception table entry covering this address.  */
  addr_in_block = get_frame_address_in_block (this_frame);
  entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
  if (!entry)
    return 0;

  /* The ARM exception table does not describe unwind information
     for arbitrary PC values, but is guaranteed to be correct only
     at call sites.  We have to decide here whether we want to use
     ARM exception table information for this frame, or fall back
     to using prologue parsing.  (Note that if we have DWARF CFI,
     this sniffer isn't even called -- CFI is always preferred.)

     Before we make this decision, however, we check whether we
     actually have *symbol* information for the current frame.
     If not, prologue parsing would not work anyway, so we might
     as well use the exception table and hope for the best.  */
  if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
    {
      int exc_valid = 0;

      /* If the next frame is "normal", we are at a call site in this
	 frame, so exception information is guaranteed to be valid.  */
      if (get_next_frame (this_frame)
	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
	exc_valid = 1;

      /* We also assume exception information is valid if we're currently
	 blocked in a system call.  The system library is supposed to
	 ensure this, so that e.g. pthread cancellation works.
	 This is detected by reading the instruction immediately
	 preceding PC and checking whether it is an SVC.  */
      if (arm_frame_is_thumb (this_frame))
	{
	  ULONGEST insn;

	  if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
						 2, byte_order_for_code, &insn)
	      && (insn & 0xff00) == 0xdf00 /* svc */)
	    exc_valid = 1;
	}
      else
	{
	  ULONGEST insn;

	  if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
						 4, byte_order_for_code, &insn)
	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
	    exc_valid = 1;
	}

      /* Bail out if we don't know that exception information is valid.  */
      if (!exc_valid)
	return 0;

      /* The ARM exception index does not mark the *end* of the region
	 covered by the entry, and some functions will not have any entry.
	 To correctly recognize the end of the covered region, the linker
	 should have inserted dummy records with a CANTUNWIND marker.

	 Unfortunately, current versions of GNU ld do not reliably do
	 this, and thus we may have found an incorrect entry above.
	 As a (temporary) sanity check, we only use the entry if it
	 lies *within* the bounds of the function.  Note that this check
	 might reject perfectly valid entries that just happen to cover
	 multiple functions; therefore this check ought to be removed
	 once the linker is fixed.  */
      if (func_start > exidx_region)
	return 0;
    }

  /* Decode the list of unwinding instructions into a prologue cache.
     Note that this may fail due to e.g. a "refuse to unwind" code.  */
  cache = arm_exidx_fill_cache (this_frame, entry);
  if (!cache)
    return 0;

  *this_prologue_cache = cache;
  return 1;
}
2683
/* The ARM exception-table unwinder.  The sniffer fills in the cache;
   the this_id and prev_register hooks are shared with the prologue
   unwinder, as described above.  */

struct frame_unwind arm_exidx_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_exidx_unwind_sniffer
};
2692
2693 static struct arm_prologue_cache *
2694 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2695 {
2696 struct arm_prologue_cache *cache;
2697 int reg;
2698
2699 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2700 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2701
2702 /* Still rely on the offset calculated from prologue. */
2703 arm_scan_prologue (this_frame, cache);
2704
2705 /* Since we are in epilogue, the SP has been restored. */
2706 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2707
2708 /* Calculate actual addresses of saved registers using offsets
2709 determined by arm_scan_prologue. */
2710 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2711 if (trad_frame_addr_p (cache->saved_regs, reg))
2712 cache->saved_regs[reg].addr += cache->prev_sp;
2713
2714 return cache;
2715 }
2716
/* Implementation of function hook 'this_id' in
   'struct frame_uwnind' for epilogue unwinder.  */

static void
arm_epilogue_frame_this_id (struct frame_info *this_frame,
			    void **this_cache,
			    struct frame_id *this_id)
{
  struct arm_prologue_cache *cache;
  CORE_ADDR pc, func;

  /* Build the epilogue cache lazily on first use.  */
  if (*this_cache == NULL)
    *this_cache = arm_make_epilogue_frame_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* Use function start address as part of the frame ID.  If we cannot
     identify the start address (due to missing symbol information),
     fall back to just using the current PC.  */
  pc = get_frame_pc (this_frame);
  func = get_frame_func (this_frame);
  if (func == 0)
    func = pc;

  /* NOTE(review): FUNC is computed above but never used -- the frame
     ID is built from PC, so the comment and the code disagree.
     Confirm whether FUNC was intended here, as in arm_prologue_this_id.  */
  (*this_id) = frame_id_build (cache->prev_sp, pc);
}
2742
/* Implementation of function hook 'prev_register' in
   'struct frame_uwnind' for epilogue unwinder.  */

static struct value *
arm_epilogue_frame_prev_register (struct frame_info *this_frame,
				  void **this_cache, int regnum)
{
  /* Build the epilogue cache lazily, then delegate register
     reconstruction to the common prologue-based implementation.  */
  if (*this_cache == NULL)
    *this_cache = arm_make_epilogue_frame_cache (this_frame);

  return arm_prologue_prev_register (this_frame, this_cache, regnum);
}
2755
2756 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2757 CORE_ADDR pc);
2758 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2759 CORE_ADDR pc);
2760
2761 /* Implementation of function hook 'sniffer' in
2762 'struct frame_uwnind' for epilogue unwinder. */
2763
2764 static int
2765 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2766 struct frame_info *this_frame,
2767 void **this_prologue_cache)
2768 {
2769 if (frame_relative_level (this_frame) == 0)
2770 {
2771 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2772 CORE_ADDR pc = get_frame_pc (this_frame);
2773
2774 if (arm_frame_is_thumb (this_frame))
2775 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2776 else
2777 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2778 }
2779 else
2780 return 0;
2781 }
2782
/* Frame unwinder from epilogue.  Only ever accepts the innermost
   frame (see arm_epilogue_frame_sniffer).  */

static const struct frame_unwind arm_epilogue_frame_unwind =
{
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_epilogue_frame_this_id,
  arm_epilogue_frame_prev_register,
  NULL,
  arm_epilogue_frame_sniffer,
};
2794
2795 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2796 trampoline, return the target PC. Otherwise return 0.
2797
2798 void call0a (char c, short s, int i, long l) {}
2799
2800 int main (void)
2801 {
2802 (*pointer_to_call0a) (c, s, i, l);
2803 }
2804
2805 Instead of calling a stub library function _call_via_xx (xx is
2806 the register name), GCC may inline the trampoline in the object
2807 file as below (register r2 has the address of call0a).
2808
2809 .global main
2810 .type main, %function
2811 ...
2812 bl .L1
2813 ...
2814 .size main, .-main
2815
2816 .L1:
2817 bx r2
2818
2819 The trampoline 'bx r2' doesn't belong to main. */
2820
2821 static CORE_ADDR
2822 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2823 {
2824 /* The heuristics of recognizing such trampoline is that FRAME is
2825 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2826 if (arm_frame_is_thumb (frame))
2827 {
2828 gdb_byte buf[2];
2829
2830 if (target_read_memory (pc, buf, 2) == 0)
2831 {
2832 struct gdbarch *gdbarch = get_frame_arch (frame);
2833 enum bfd_endian byte_order_for_code
2834 = gdbarch_byte_order_for_code (gdbarch);
2835 uint16_t insn
2836 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2837
2838 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2839 {
2840 CORE_ADDR dest
2841 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2842
2843 /* Clear the LSB so that gdb core sets step-resume
2844 breakpoint at the right address. */
2845 return UNMAKE_THUMB_ADDR (dest);
2846 }
2847 }
2848 }
2849
2850 return 0;
2851 }
2852
/* Build a prologue cache for a stub frame (see arm_stub_unwind_sniffer):
   nothing is saved on the stack, so the previous SP is simply this
   frame's current SP.  */

static struct arm_prologue_cache *
arm_make_stub_cache (struct frame_info *this_frame)
{
  struct arm_prologue_cache *cache;

  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);

  return cache;
}
2865
/* Our frame ID for a stub frame is the current SP and LR.  */

static void
arm_stub_this_id (struct frame_info *this_frame,
		  void **this_cache,
		  struct frame_id *this_id)
{
  struct arm_prologue_cache *cache;

  /* Build the stub cache lazily on first use.  */
  if (*this_cache == NULL)
    *this_cache = arm_make_stub_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
}
2881
2882 static int
2883 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2884 struct frame_info *this_frame,
2885 void **this_prologue_cache)
2886 {
2887 CORE_ADDR addr_in_block;
2888 gdb_byte dummy[4];
2889 CORE_ADDR pc, start_addr;
2890 const char *name;
2891
2892 addr_in_block = get_frame_address_in_block (this_frame);
2893 pc = get_frame_pc (this_frame);
2894 if (in_plt_section (addr_in_block)
2895 /* We also use the stub winder if the target memory is unreadable
2896 to avoid having the prologue unwinder trying to read it. */
2897 || target_read_memory (pc, dummy, 4) != 0)
2898 return 1;
2899
2900 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2901 && arm_skip_bx_reg (this_frame, pc) != 0)
2902 return 1;
2903
2904 return 0;
2905 }
2906
/* Unwinder for stub frames (PLT entries, unreadable code, inline
   thumb trampolines); register reconstruction is shared with the
   prologue unwinder.  */

struct frame_unwind arm_stub_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_stub_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_stub_unwind_sniffer
};
2915
2916 /* Put here the code to store, into CACHE->saved_regs, the addresses
2917 of the saved registers of frame described by THIS_FRAME. CACHE is
2918 returned. */
2919
2920 static struct arm_prologue_cache *
2921 arm_m_exception_cache (struct frame_info *this_frame)
2922 {
2923 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2924 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2925 struct arm_prologue_cache *cache;
2926 CORE_ADDR unwound_sp;
2927 LONGEST xpsr;
2928
2929 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2930 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2931
2932 unwound_sp = get_frame_register_unsigned (this_frame,
2933 ARM_SP_REGNUM);
2934
2935 /* The hardware saves eight 32-bit words, comprising xPSR,
2936 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2937 "B1.5.6 Exception entry behavior" in
2938 "ARMv7-M Architecture Reference Manual". */
2939 cache->saved_regs[0].addr = unwound_sp;
2940 cache->saved_regs[1].addr = unwound_sp + 4;
2941 cache->saved_regs[2].addr = unwound_sp + 8;
2942 cache->saved_regs[3].addr = unwound_sp + 12;
2943 cache->saved_regs[12].addr = unwound_sp + 16;
2944 cache->saved_regs[14].addr = unwound_sp + 20;
2945 cache->saved_regs[15].addr = unwound_sp + 24;
2946 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2947
2948 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2949 aligner between the top of the 32-byte stack frame and the
2950 previous context's stack pointer. */
2951 cache->prev_sp = unwound_sp + 32;
2952 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2953 && (xpsr & (1 << 9)) != 0)
2954 cache->prev_sp += 4;
2955
2956 return cache;
2957 }
2958
/* Implementation of function hook 'this_id' in
   'struct frame_uwnind'.  */

static void
arm_m_exception_this_id (struct frame_info *this_frame,
			 void **this_cache,
			 struct frame_id *this_id)
{
  struct arm_prologue_cache *cache;

  /* Build the exception-frame cache lazily on first use.  */
  if (*this_cache == NULL)
    *this_cache = arm_m_exception_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* Our frame ID for a stub frame is the current SP and LR.  */
  *this_id = frame_id_build (cache->prev_sp,
			     get_frame_pc (this_frame));
}
2977
/* Implementation of function hook 'prev_register' in
   'struct frame_uwnind'.  */

static struct value *
arm_m_exception_prev_register (struct frame_info *this_frame,
			       void **this_cache,
			       int prev_regnum)
{
  struct arm_prologue_cache *cache;

  /* Build the exception-frame cache lazily on first use.  */
  if (*this_cache == NULL)
    *this_cache = arm_m_exception_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* The value was already reconstructed into PREV_SP.  */
  if (prev_regnum == ARM_SP_REGNUM)
    return frame_unwind_got_constant (this_frame, prev_regnum,
				      cache->prev_sp);

  /* Everything else comes from the hardware-saved stack frame
     recorded in SAVED_REGS by arm_m_exception_cache.  */
  return trad_frame_get_prev_register (this_frame, cache->saved_regs,
				       prev_regnum);
}
3000
3001 /* Implementation of function hook 'sniffer' in
3002 'struct frame_uwnind'. */
3003
3004 static int
3005 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3006 struct frame_info *this_frame,
3007 void **this_prologue_cache)
3008 {
3009 CORE_ADDR this_pc = get_frame_pc (this_frame);
3010
3011 /* No need to check is_m; this sniffer is only registered for
3012 M-profile architectures. */
3013
3014 /* Check if exception frame returns to a magic PC value. */
3015 return arm_m_addr_is_magic (this_pc);
3016 }
3017
/* Frame unwinder for M-profile exceptions.  Uses SIGTRAMP_FRAME since
   an exception, like a signal, interrupts normal control flow.  */

struct frame_unwind arm_m_exception_unwind =
{
  SIGTRAMP_FRAME,
  default_frame_unwind_stop_reason,
  arm_m_exception_this_id,
  arm_m_exception_prev_register,
  NULL,
  arm_m_exception_unwind_sniffer
};
3029
/* Frame base hook for the prologue unwinder: return PREV_SP minus the
   frame size (presumably the value of the frame register within this
   frame -- see the framesize computation in arm_exidx_fill_cache).  */

static CORE_ADDR
arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
{
  struct arm_prologue_cache *cache;

  /* Build the prologue cache lazily on first use.  */
  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  return cache->prev_sp - cache->framesize;
}
3041
/* Frame base handler paired with arm_prologue_unwind; the same address
   is used for the frame base, locals, and arguments.  */

struct frame_base arm_normal_base = {
  &arm_prologue_unwind,
  arm_normal_frame_base,
  arm_normal_frame_base,
  arm_normal_frame_base
};
3048
/* Recover PC or PS for the DWARF2 frame unwinder.  These two need
   special handling because the saved LR may carry the Thumb state bit;
   arm_dwarf2_frame_init_reg installs this function for exactly these
   two registers.  */

static struct value *
arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
			  int regnum)
{
  struct gdbarch * gdbarch = get_frame_arch (this_frame);
  CORE_ADDR lr, cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

  switch (regnum)
    {
    case ARM_PC_REGNUM:
      /* The PC is normally copied from the return column, which
	 describes saves of LR.  However, that version may have an
	 extra bit set to indicate Thumb state.  The bit is not
	 part of the PC.  */
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, regnum,
					arm_addr_bits_remove (gdbarch, lr));

    case ARM_PS_REGNUM:
      /* Reconstruct the T bit; see arm_prologue_prev_register for details.  */
      cpsr = get_frame_register_unsigned (this_frame, regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
	cpsr |= t_bit;
      else
	cpsr &= ~t_bit;
      return frame_unwind_got_constant (this_frame, regnum, cpsr);

    default:
      /* Only PC and PS are ever routed here.  */
      internal_error (__FILE__, __LINE__,
		      _("Unexpected register %d"), regnum);
    }
}
3083
3084 static void
3085 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3086 struct dwarf2_frame_state_reg *reg,
3087 struct frame_info *this_frame)
3088 {
3089 switch (regnum)
3090 {
3091 case ARM_PC_REGNUM:
3092 case ARM_PS_REGNUM:
3093 reg->how = DWARF2_FRAME_REG_FN;
3094 reg->loc.fn = arm_dwarf2_prev_register;
3095 break;
3096 case ARM_SP_REGNUM:
3097 reg->how = DWARF2_FRAME_REG_CFA;
3098 break;
3099 }
3100 }
3101
/* Implement the stack_frame_destroyed_p gdbarch method for Thumb mode.
   Return non-zero if PC appears to be inside a function epilogue.  */

static int
thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn, insn2;
  int found_return = 0, found_stack_adjust = 0;
  CORE_ADDR func_start, func_end;
  CORE_ADDR scan_pc;
  gdb_byte buf[4];

  /* Without function bounds we cannot scan; report "not destroyed".  */
  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* The epilogue is a sequence of instructions along the following lines:

    - add stack frame size to SP or FP
    - [if frame pointer used] restore SP from FP
    - restore registers from SP [may include PC]
    - a return-type instruction [if PC wasn't already restored]

    In a first pass, we scan forward from the current PC and verify the
    instructions we find as compatible with this sequence, ending in a
    return instruction.

    However, this is not sufficient to distinguish indirect function calls
    within a function from indirect tail calls in the epilogue in some cases.
    Therefore, if we didn't already find any SP-changing instruction during
    forward scan, we add a backward scanning heuristic to ensure we actually
    are in the epilogue.  */

  scan_pc = pc;
  while (scan_pc < func_end && !found_return)
    {
      if (target_read_memory (scan_pc, buf, 2))
	break;

      scan_pc += 2;
      insn = extract_unsigned_integer (buf, 2, byte_order_for_code);

      if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
	found_return = 1;
      else if (insn == 0x46f7)  /* mov pc, lr */
	found_return = 1;
      else if (thumb_instruction_restores_sp (insn))
	{
	  if ((insn & 0xff00) == 0xbd00)  /* pop <registers, PC> */
	    found_return = 1;
	}
      else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
	{
	  /* Fetch the second halfword of the 32-bit encoding.  */
	  if (target_read_memory (scan_pc, buf, 2))
	    break;

	  scan_pc += 2;
	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);

	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
	    {
	      if (insn2 & 0x8000)  /* <registers> include PC.  */
		found_return = 1;
	    }
	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
		   && (insn2 & 0x0fff) == 0x0b04)
	    {
	      if ((insn2 & 0xf000) == 0xf000)  /* <Rt> is PC.  */
		found_return = 1;
	    }
	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
		   && (insn2 & 0x0e00) == 0x0a00)
	    ;  /* VFP register restore: valid epilogue insn, keep scanning.  */
	  else
	    break;
	}
      else
	break;
    }

  if (!found_return)
    return 0;

  /* Since any instruction in the epilogue sequence, with the possible
     exception of return itself, updates the stack pointer, we need to
     scan backwards for at most one instruction.  Try either a 16-bit or
     a 32-bit instruction.  This is just a heuristic, so we do not worry
     too much about false positives.  */

  if (pc - 4 < func_start)
    return 0;
  if (target_read_memory (pc - 4, buf, 4))
    return 0;

  insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
  insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);

  if (thumb_instruction_restores_sp (insn2))
    found_stack_adjust = 1;
  else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
    found_stack_adjust = 1;
  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
	   && (insn2 & 0x0fff) == 0x0b04)
    found_stack_adjust = 1;
  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
	   && (insn2 & 0x0e00) == 0x0a00)
    found_stack_adjust = 1;

  return found_stack_adjust;
}
3211
/* Implement stack_frame_destroyed_p for ARM (non-Thumb) mode; called
   from arm_stack_frame_destroyed_p.  Return non-zero if PC appears to
   be inside a function epilogue.  */

static int
arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn;
  int found_return;
  CORE_ADDR func_start, func_end;

  /* Without function bounds we cannot scan; report "not destroyed".  */
  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* We are in the epilogue if the previous instruction was a stack
     adjustment and the next instruction is a possible return (bx, mov
     pc, or pop).  We could have to scan backwards to find the stack
     adjustment, or forwards to find the return, but this is a decent
     approximation.  First scan forwards.  */

  found_return = 0;
  insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0ffffff0) == 0x012fff10)
	/* BX.  */
	found_return = 1;
      else if ((insn & 0x0ffffff0) == 0x01a0f000)
	/* MOV PC.  */
	found_return = 1;
      else if ((insn & 0x0fff0000) == 0x08bd0000
	  && (insn & 0x0000c000) != 0)
	/* POP (LDMIA), including PC or LR.  */
	found_return = 1;
    }

  if (!found_return)
    return 0;

  /* Scan backwards.  This is just a heuristic, so do not worry about
     false positives from mode changes.  */

  if (pc < func_start + 4)
    return 0;

  insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
  if (arm_instruction_restores_sp (insn))
    return 1;

  return 0;
}
3260
3261 /* Implement the stack_frame_destroyed_p gdbarch method. */
3262
3263 static int
3264 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3265 {
3266 if (arm_pc_is_thumb (gdbarch, pc))
3267 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3268 else
3269 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3270 }
3271
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.
   Items are singly linked through PREV; DATA is a heap-allocated copy
   owned by the item (freed in pop_stack_item).  */

struct stack_item
{
  int len;			/* Number of bytes in DATA.  */
  struct stack_item *prev;	/* Item below this one on the stack.  */
  gdb_byte *data;		/* Malloc'd copy of the contents.  */
};
3281
3282 static struct stack_item *
3283 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3284 {
3285 struct stack_item *si;
3286 si = XNEW (struct stack_item);
3287 si->data = (gdb_byte *) xmalloc (len);
3288 si->len = len;
3289 si->prev = prev;
3290 memcpy (si->data, contents, len);
3291 return si;
3292 }
3293
3294 static struct stack_item *
3295 pop_stack_item (struct stack_item *si)
3296 {
3297 struct stack_item *dead = si;
3298 si = si->prev;
3299 xfree (dead->data);
3300 xfree (dead);
3301 return si;
3302 }
3303
3304 /* Implement the gdbarch type alignment method, overrides the generic
3305 alignment algorithm for anything that is arm specific. */
3306
3307 static ULONGEST
3308 arm_type_align (gdbarch *gdbarch, struct type *t)
3309 {
3310 t = check_typedef (t);
3311 if (TYPE_CODE (t) == TYPE_CODE_ARRAY && TYPE_VECTOR (t))
3312 {
3313 /* Use the natural alignment for vector types (the same for
3314 scalar type), but the maximum alignment is 64-bit. */
3315 if (TYPE_LENGTH (t) > 8)
3316 return 8;
3317 else
3318 return TYPE_LENGTH (t);
3319 }
3320
3321 /* Allow the common code to calculate the alignment. */
3322 return 0;
3323 }
3324
/* Possible base types for a candidate for passing and returning in
   VFP registers (a "CPRC" in AAPCS terms, classified by
   arm_vfp_cprc_sub_candidate below).  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,	/* Not yet classified.  */
  VFP_CPRC_SINGLE,	/* Single-precision float (4 bytes).  */
  VFP_CPRC_DOUBLE,	/* Double-precision float (8 bytes).  */
  VFP_CPRC_VEC64,	/* 64-bit containerized vector.  */
  VFP_CPRC_VEC128	/* 128-bit containerized vector.  */
};
3336
3337 /* The length of one element of base type B. */
3338
3339 static unsigned
3340 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3341 {
3342 switch (b)
3343 {
3344 case VFP_CPRC_SINGLE:
3345 return 4;
3346 case VFP_CPRC_DOUBLE:
3347 return 8;
3348 case VFP_CPRC_VEC64:
3349 return 8;
3350 case VFP_CPRC_VEC128:
3351 return 16;
3352 default:
3353 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3354 (int) b);
3355 }
3356 }
3357
3358 /* The character ('s', 'd' or 'q') for the type of VFP register used
3359 for passing base type B. */
3360
3361 static int
3362 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3363 {
3364 switch (b)
3365 {
3366 case VFP_CPRC_SINGLE:
3367 return 's';
3368 case VFP_CPRC_DOUBLE:
3369 return 'd';
3370 case VFP_CPRC_VEC64:
3371 return 'd';
3372 case VFP_CPRC_VEC128:
3373 return 'q';
3374 default:
3375 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3376 (int) b);
3377 }
3378 }
3379
/* Determine whether T may be part of a candidate for passing and
   returning in VFP registers, ignoring the limit on the total number
   of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
   classification of the first valid component found; if it is not
   VFP_CPRC_UNKNOWN, all components must have the same classification
   as *BASE_TYPE.  If it is found that T contains a type not permitted
   for passing and returning in VFP registers, a type differently
   classified from *BASE_TYPE, or two types differently classified
   from each other, return -1, otherwise return the total number of
   base-type elements found (possibly 0 in an empty structure or
   array).  Vector types are not currently supported, matching the
   generic AAPCS support.  */

static int
arm_vfp_cprc_sub_candidate (struct type *t,
			    enum arm_vfp_cprc_base_type *base_type)
{
  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    case TYPE_CODE_FLT:
      /* A scalar float is one SINGLE or DOUBLE element.  */
      switch (TYPE_LENGTH (t))
	{
	case 4:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 1;

	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 1;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_COMPLEX:
      /* Arguments of complex T where T is one of the types float or
	 double get treated as if they are implemented as:

	 struct complexT
	 {
	   T real;
	   T imag;
	 };

      */
      switch (TYPE_LENGTH (t))
	{
	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 2;

	case 16:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 2;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_ARRAY:
      {
	if (TYPE_VECTOR (t))
	  {
	    /* A 64-bit or 128-bit containerized vector type are VFP
	       CPRCs.  */
	    switch (TYPE_LENGTH (t))
	      {
	      case 8:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC64;
		return 1;
	      case 16:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC128;
		return 1;
	      default:
		return -1;
	      }
	  }
	else
	  {
	    int count;
	    unsigned unitlen;

	    /* An ordinary array: classify the element type, then
	       derive the element count from the total length.  */
	    count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
						base_type);
	    if (count == -1)
	      return -1;
	    if (TYPE_LENGTH (t) == 0)
	      {
		gdb_assert (count == 0);
		return 0;
	      }
	    else if (count == 0)
	      return -1;
	    unitlen = arm_vfp_cprc_unit_length (*base_type);
	    gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
	    return TYPE_LENGTH (t) / unitlen;
	  }
      }
      break;

    case TYPE_CODE_STRUCT:
      {
	int count = 0;
	unsigned unitlen;
	int i;
	/* Sum the element counts of all non-static fields; any padding
	   (length != unitlen * count) disqualifies the struct.  */
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = 0;

	    if (!field_is_static (&TYPE_FIELD (t, i)))
	      sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
						      base_type);
	    if (sub_count == -1)
	      return -1;
	    count += sub_count;
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    case TYPE_CODE_UNION:
      {
	int count = 0;
	unsigned unitlen;
	int i;
	/* Union members overlap, so the count is the maximum over the
	   members, not the sum.  */
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count = (count > sub_count ? count : sub_count);
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    default:
      break;
    }

  return -1;
}
3558
3559 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3560 if passed to or returned from a non-variadic function with the VFP
3561 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3562 *BASE_TYPE to the base type for T and *COUNT to the number of
3563 elements of that base type before returning. */
3564
3565 static int
3566 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3567 int *count)
3568 {
3569 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3570 int c = arm_vfp_cprc_sub_candidate (t, &b);
3571 if (c <= 0 || c > 4)
3572 return 0;
3573 *base_type = b;
3574 *count = c;
3575 return 1;
3576 }
3577
3578 /* Return 1 if the VFP ABI should be used for passing arguments to and
3579 returning values from a function of type FUNC_TYPE, 0
3580 otherwise. */
3581
3582 static int
3583 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3584 {
3585 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3586 /* Variadic functions always use the base ABI. Assume that functions
3587 without debug info are not variadic. */
3588 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3589 return 0;
3590 /* The VFP ABI is only supported as a variant of AAPCS. */
3591 if (tdep->arm_abi != ARM_ABI_AAPCS)
3592 return 0;
3593 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3594 }
3595
/* Implement the push_dummy_call gdbarch method: set up registers and
   stack for an inferior function call, returning the final SP.

   We currently only support passing parameters in integer registers, which
   conforms with GCC's default model, and VFP argument passing following
   the VFP variant of AAPCS.  Several other variants exist and
   we should probably support some of them based on the selected ABI.  */

static CORE_ADDR
arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
		     struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
		     struct value **args, CORE_ADDR sp,
		     function_call_return_method return_method,
		     CORE_ADDR struct_addr)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  int argnum;
  int argreg;
  int nstack;
  struct stack_item *si = NULL;
  int use_vfp_abi;
  struct type *ftype;
  /* One free-bit per single-precision VFP register s0..s15.  */
  unsigned vfp_regs_free = (1 << 16) - 1;

  /* Determine the type of this function and whether the VFP ABI
     applies.  */
  ftype = check_typedef (value_type (function));
  if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
    ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
  use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);

  /* Set the return address.  For the ARM, the return breakpoint is
     always at BP_ADDR.  */
  if (arm_pc_is_thumb (gdbarch, bp_addr))
    bp_addr |= 1;
  regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);

  /* Walk through the list of args and determine how large a temporary
     stack is required.  Need to take care here as structs may be
     passed on the stack, and we have to push them.  */
  nstack = 0;

  argreg = ARM_A1_REGNUM;
  /* NOTE(review): nstack was already set to 0 above; this repeated
     assignment is harmless.  */
  nstack = 0;

  /* The struct_return pointer occupies the first parameter
     passing register.  */
  if (return_method == return_method_struct)
    {
      if (arm_debug)
	fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
			    gdbarch_register_name (gdbarch, argreg),
			    paddress (gdbarch, struct_addr));
      regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
      argreg++;
    }

  for (argnum = 0; argnum < nargs; argnum++)
    {
      int len;
      struct type *arg_type;
      struct type *target_type;
      enum type_code typecode;
      const bfd_byte *val;
      int align;
      enum arm_vfp_cprc_base_type vfp_base_type;
      int vfp_base_count;
      int may_use_core_reg = 1;

      arg_type = check_typedef (value_type (args[argnum]));
      len = TYPE_LENGTH (arg_type);
      target_type = TYPE_TARGET_TYPE (arg_type);
      typecode = TYPE_CODE (arg_type);
      val = value_contents (args[argnum]);

      align = type_align (arg_type);
      /* Round alignment up to a whole number of words.  */
      align = (align + ARM_INT_REGISTER_SIZE - 1)
		& ~(ARM_INT_REGISTER_SIZE - 1);
      /* Different ABIs have different maximum alignments.  */
      if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
	{
	  /* The APCS ABI only requires word alignment.  */
	  align = ARM_INT_REGISTER_SIZE;
	}
      else
	{
	  /* The AAPCS requires at most doubleword alignment.  */
	  if (align > ARM_INT_REGISTER_SIZE * 2)
	    align = ARM_INT_REGISTER_SIZE * 2;
	}

      if (use_vfp_abi
	  && arm_vfp_call_candidate (arg_type, &vfp_base_type,
				     &vfp_base_count))
	{
	  int regno;
	  int unit_length;
	  int shift;
	  unsigned mask;

	  /* Because this is a CPRC it cannot go in a core register or
	     cause a core register to be skipped for alignment.
	     Either it goes in VFP registers and the rest of this loop
	     iteration is skipped for this argument, or it goes on the
	     stack (and the stack alignment code is correct for this
	     case).  */
	  may_use_core_reg = 0;

	  /* Find a contiguous run of free VFP registers wide enough
	     for the whole candidate.  */
	  unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
	  shift = unit_length / 4;
	  mask = (1 << (shift * vfp_base_count)) - 1;
	  for (regno = 0; regno < 16; regno += shift)
	    if (((vfp_regs_free >> regno) & mask) == mask)
	      break;

	  if (regno < 16)
	    {
	      int reg_char;
	      int reg_scaled;
	      int i;

	      vfp_regs_free &= ~(mask << regno);
	      reg_scaled = regno / shift;
	      reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
	      for (i = 0; i < vfp_base_count; i++)
		{
		  char name_buf[4];
		  int regnum;
		  if (reg_char == 'q')
		    arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
					 val + i * unit_length);
		  else
		    {
		      /* Write each element to its s/d register by
			 name.  */
		      xsnprintf (name_buf, sizeof (name_buf), "%c%d",
				 reg_char, reg_scaled + i);
		      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
							    strlen (name_buf));
		      regcache->cooked_write (regnum, val + i * unit_length);
		    }
		}
	      continue;
	    }
	  else
	    {
	      /* This CPRC could not go in VFP registers, so all VFP
		 registers are now marked as used.  */
	      vfp_regs_free = 0;
	    }
	}

      /* Push stack padding for doubleword alignment.  */
      if (nstack & (align - 1))
	{
	  si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
	  nstack += ARM_INT_REGISTER_SIZE;
	}

      /* Doubleword aligned quantities must go in even register pairs.  */
      if (may_use_core_reg
	  && argreg <= ARM_LAST_ARG_REGNUM
	  && align > ARM_INT_REGISTER_SIZE
	  && argreg & 1)
	argreg++;

      /* If the argument is a pointer to a function, and it is a
	 Thumb function, create a LOCAL copy of the value and set
	 the THUMB bit in it.  */
      if (TYPE_CODE_PTR == typecode
	  && target_type != NULL
	  && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
	{
	  CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
	  if (arm_pc_is_thumb (gdbarch, regval))
	    {
	      bfd_byte *copy = (bfd_byte *) alloca (len);
	      store_unsigned_integer (copy, len, byte_order,
				      MAKE_THUMB_ADDR (regval));
	      val = copy;
	    }
	}

      /* Copy the argument to general registers or the stack in
	 register-sized pieces.  Large arguments are split between
	 registers and stack.  */
      while (len > 0)
	{
	  int partial_len = len < ARM_INT_REGISTER_SIZE
			    ? len : ARM_INT_REGISTER_SIZE;
	  CORE_ADDR regval
	    = extract_unsigned_integer (val, partial_len, byte_order);

	  if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
	    {
	      /* The argument is being passed in a general purpose
		 register.  */
	      if (byte_order == BFD_ENDIAN_BIG)
		regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
	      if (arm_debug)
		fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
				    argnum,
				    gdbarch_register_name
				      (gdbarch, argreg),
				    phex (regval, ARM_INT_REGISTER_SIZE));
	      regcache_cooked_write_unsigned (regcache, argreg, regval);
	      argreg++;
	    }
	  else
	    {
	      gdb_byte buf[ARM_INT_REGISTER_SIZE];

	      memset (buf, 0, sizeof (buf));
	      store_unsigned_integer (buf, partial_len, byte_order, regval);

	      /* Push the arguments onto the stack.  */
	      if (arm_debug)
		fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
				    argnum, nstack);
	      si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
	      nstack += ARM_INT_REGISTER_SIZE;
	    }

	  len -= partial_len;
	  val += partial_len;
	}
    }
  /* If we have an odd number of words to push, then decrement the stack
     by one word now, so first stack argument will be dword aligned.  */
  if (nstack & 4)
    sp -= 4;

  /* Drain the FILO: items pushed first end up at the highest stack
     addresses.  */
  while (si)
    {
      sp -= si->len;
      write_memory (sp, si->data, si->len);
      si = pop_stack_item (si);
    }

  /* Finally, update the SP register.  */
  regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);

  return sp;
}
3836
3837
3838 /* Always align the frame to an 8-byte boundary. This is required on
3839 some platforms and harmless on the rest. */
3840
3841 static CORE_ADDR
3842 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3843 {
3844 /* Align the stack to eight bytes. */
3845 return sp & ~ (CORE_ADDR) 7;
3846 }
3847
3848 static void
3849 print_fpu_flags (struct ui_file *file, int flags)
3850 {
3851 if (flags & (1 << 0))
3852 fputs_filtered ("IVO ", file);
3853 if (flags & (1 << 1))
3854 fputs_filtered ("DVZ ", file);
3855 if (flags & (1 << 2))
3856 fputs_filtered ("OFL ", file);
3857 if (flags & (1 << 3))
3858 fputs_filtered ("UFL ", file);
3859 if (flags & (1 << 4))
3860 fputs_filtered ("INX ", file);
3861 fputc_filtered ('\n', file);
3862 }
3863
3864 /* Print interesting information about the floating point processor
3865 (if present) or emulator. */
3866 static void
3867 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3868 struct frame_info *frame, const char *args)
3869 {
3870 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3871 int type;
3872
3873 type = (status >> 24) & 127;
3874 if (status & (1 << 31))
3875 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3876 else
3877 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3878 /* i18n: [floating point unit] mask */
3879 fputs_filtered (_("mask: "), file);
3880 print_fpu_flags (file, status >> 16);
3881 /* i18n: [floating point unit] flags */
3882 fputs_filtered (_("flags: "), file);
3883 print_fpu_flags (file, status);
3884 }
3885
3886 /* Construct the ARM extended floating point type. */
3887 static struct type *
3888 arm_ext_type (struct gdbarch *gdbarch)
3889 {
3890 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3891
3892 if (!tdep->arm_ext_type)
3893 tdep->arm_ext_type
3894 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3895 floatformats_arm_ext);
3896
3897 return tdep->arm_ext_type;
3898 }
3899
3900 static struct type *
3901 arm_neon_double_type (struct gdbarch *gdbarch)
3902 {
3903 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3904
3905 if (tdep->neon_double_type == NULL)
3906 {
3907 struct type *t, *elem;
3908
3909 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3910 TYPE_CODE_UNION);
3911 elem = builtin_type (gdbarch)->builtin_uint8;
3912 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3913 elem = builtin_type (gdbarch)->builtin_uint16;
3914 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3915 elem = builtin_type (gdbarch)->builtin_uint32;
3916 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3917 elem = builtin_type (gdbarch)->builtin_uint64;
3918 append_composite_type_field (t, "u64", elem);
3919 elem = builtin_type (gdbarch)->builtin_float;
3920 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3921 elem = builtin_type (gdbarch)->builtin_double;
3922 append_composite_type_field (t, "f64", elem);
3923
3924 TYPE_VECTOR (t) = 1;
3925 TYPE_NAME (t) = "neon_d";
3926 tdep->neon_double_type = t;
3927 }
3928
3929 return tdep->neon_double_type;
3930 }
3931
3932 /* FIXME: The vector types are not correctly ordered on big-endian
3933 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3934 bits of d0 - regardless of what unit size is being held in d0. So
3935 the offset of the first uint8 in d0 is 7, but the offset of the
3936 first float is 4. This code works as-is for little-endian
3937 targets. */
3938
3939 static struct type *
3940 arm_neon_quad_type (struct gdbarch *gdbarch)
3941 {
3942 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3943
3944 if (tdep->neon_quad_type == NULL)
3945 {
3946 struct type *t, *elem;
3947
3948 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3949 TYPE_CODE_UNION);
3950 elem = builtin_type (gdbarch)->builtin_uint8;
3951 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3952 elem = builtin_type (gdbarch)->builtin_uint16;
3953 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3954 elem = builtin_type (gdbarch)->builtin_uint32;
3955 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3956 elem = builtin_type (gdbarch)->builtin_uint64;
3957 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3958 elem = builtin_type (gdbarch)->builtin_float;
3959 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3960 elem = builtin_type (gdbarch)->builtin_double;
3961 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3962
3963 TYPE_VECTOR (t) = 1;
3964 TYPE_NAME (t) = "neon_q";
3965 tdep->neon_quad_type = t;
3966 }
3967
3968 return tdep->neon_quad_type;
3969 }
3970
/* Return the GDB type object for the "standard" data type of data in
   register N.  */

static struct type *
arm_register_type (struct gdbarch *gdbarch, int regnum)
{
  int num_regs = gdbarch_num_regs (gdbarch);

  /* The 32 single-precision VFP pseudo registers follow the raw
     registers.  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && regnum >= num_regs && regnum < num_regs + 32)
    return builtin_type (gdbarch)->builtin_float;

  /* The 16 NEON quad pseudo registers follow the VFP pseudos.  */
  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
    return arm_neon_quad_type (gdbarch);

  /* If the target description has register information, we are only
     in this function so that we can override the types of
     double-precision registers for NEON.  */
  if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
    {
      struct type *t = tdesc_register_type (gdbarch, regnum);

      if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
	  && TYPE_CODE (t) == TYPE_CODE_FLT
	  && gdbarch_tdep (gdbarch)->have_neon)
	return arm_neon_double_type (gdbarch);
      else
	return t;
    }

  if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
    {
      /* Present FPA registers as void when the target has none.  */
      if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
	return builtin_type (gdbarch)->builtin_void;

      return arm_ext_type (gdbarch);
    }
  else if (regnum == ARM_SP_REGNUM)
    return builtin_type (gdbarch)->builtin_data_ptr;
  else if (regnum == ARM_PC_REGNUM)
    return builtin_type (gdbarch)->builtin_func_ptr;
  else if (regnum >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return builtin_type (gdbarch)->builtin_int0;
  else
    return builtin_type (gdbarch)->builtin_uint32;
}
4020
4021 /* Map a DWARF register REGNUM onto the appropriate GDB register
4022 number. */
4023
4024 static int
4025 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4026 {
4027 /* Core integer regs. */
4028 if (reg >= 0 && reg <= 15)
4029 return reg;
4030
4031 /* Legacy FPA encoding. These were once used in a way which
4032 overlapped with VFP register numbering, so their use is
4033 discouraged, but GDB doesn't support the ARM toolchain
4034 which used them for VFP. */
4035 if (reg >= 16 && reg <= 23)
4036 return ARM_F0_REGNUM + reg - 16;
4037
4038 /* New assignments for the FPA registers. */
4039 if (reg >= 96 && reg <= 103)
4040 return ARM_F0_REGNUM + reg - 96;
4041
4042 /* WMMX register assignments. */
4043 if (reg >= 104 && reg <= 111)
4044 return ARM_WCGR0_REGNUM + reg - 104;
4045
4046 if (reg >= 112 && reg <= 127)
4047 return ARM_WR0_REGNUM + reg - 112;
4048
4049 if (reg >= 192 && reg <= 199)
4050 return ARM_WC0_REGNUM + reg - 192;
4051
4052 /* VFP v2 registers. A double precision value is actually
4053 in d1 rather than s2, but the ABI only defines numbering
4054 for the single precision registers. This will "just work"
4055 in GDB for little endian targets (we'll read eight bytes,
4056 starting in s0 and then progressing to s1), but will be
4057 reversed on big endian targets with VFP. This won't
4058 be a problem for the new Neon quad registers; you're supposed
4059 to use DW_OP_piece for those. */
4060 if (reg >= 64 && reg <= 95)
4061 {
4062 char name_buf[4];
4063
4064 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4065 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4066 strlen (name_buf));
4067 }
4068
4069 /* VFP v3 / Neon registers. This range is also used for VFP v2
4070 registers, except that it now describes d0 instead of s0. */
4071 if (reg >= 256 && reg <= 287)
4072 {
4073 char name_buf[4];
4074
4075 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4076 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4077 strlen (name_buf));
4078 }
4079
4080 return -1;
4081 }
4082
4083 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4084 static int
4085 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4086 {
4087 int reg = regnum;
4088 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4089
4090 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4091 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4092
4093 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4094 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4095
4096 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4097 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4098
4099 if (reg < NUM_GREGS)
4100 return SIM_ARM_R0_REGNUM + reg;
4101 reg -= NUM_GREGS;
4102
4103 if (reg < NUM_FREGS)
4104 return SIM_ARM_FP0_REGNUM + reg;
4105 reg -= NUM_FREGS;
4106
4107 if (reg < NUM_SREGS)
4108 return SIM_ARM_FPS_REGNUM + reg;
4109 reg -= NUM_SREGS;
4110
4111 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4112 }
4113
4114 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4115 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4116 NULL if an error occurs. BUF is freed. */
4117
4118 static gdb_byte *
4119 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4120 int old_len, int new_len)
4121 {
4122 gdb_byte *new_buf;
4123 int bytes_to_read = new_len - old_len;
4124
4125 new_buf = (gdb_byte *) xmalloc (new_len);
4126 memcpy (new_buf + bytes_to_read, buf, old_len);
4127 xfree (buf);
4128 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4129 {
4130 xfree (new_buf);
4131 return NULL;
4132 }
4133 return new_buf;
4134 }
4135
/* An IT block is at most the 2-byte IT instruction followed by
   four 4-byte instructions.  The furthest back we must search to
   find an IT block that affects the current instruction is thus
   2 + 3 * 4 == 14 bytes.  */
#define MAX_IT_BLOCK_PREFIX 14

/* Use a quick scan if there are more than this many bytes of
   code.  */
#define IT_SCAN_THRESHOLD 32
4145
4146 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4147 A breakpoint in an IT block may not be hit, depending on the
4148 condition flags. */
4149 static CORE_ADDR
4150 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4151 {
4152 gdb_byte *buf;
4153 char map_type;
4154 CORE_ADDR boundary, func_start;
4155 int buf_len;
4156 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4157 int i, any, last_it, last_it_count;
4158
4159 /* If we are using BKPT breakpoints, none of this is necessary. */
4160 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4161 return bpaddr;
4162
4163 /* ARM mode does not have this problem. */
4164 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4165 return bpaddr;
4166
4167 /* We are setting a breakpoint in Thumb code that could potentially
4168 contain an IT block. The first step is to find how much Thumb
4169 code there is; we do not need to read outside of known Thumb
4170 sequences. */
4171 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4172 if (map_type == 0)
4173 /* Thumb-2 code must have mapping symbols to have a chance. */
4174 return bpaddr;
4175
4176 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4177
4178 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4179 && func_start > boundary)
4180 boundary = func_start;
4181
4182 /* Search for a candidate IT instruction. We have to do some fancy
4183 footwork to distinguish a real IT instruction from the second
4184 half of a 32-bit instruction, but there is no need for that if
4185 there's no candidate. */
4186 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4187 if (buf_len == 0)
4188 /* No room for an IT instruction. */
4189 return bpaddr;
4190
4191 buf = (gdb_byte *) xmalloc (buf_len);
4192 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4193 return bpaddr;
4194 any = 0;
4195 for (i = 0; i < buf_len; i += 2)
4196 {
4197 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4198 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4199 {
4200 any = 1;
4201 break;
4202 }
4203 }
4204
4205 if (any == 0)
4206 {
4207 xfree (buf);
4208 return bpaddr;
4209 }
4210
4211 /* OK, the code bytes before this instruction contain at least one
4212 halfword which resembles an IT instruction. We know that it's
4213 Thumb code, but there are still two possibilities. Either the
4214 halfword really is an IT instruction, or it is the second half of
4215 a 32-bit Thumb instruction. The only way we can tell is to
4216 scan forwards from a known instruction boundary. */
4217 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4218 {
4219 int definite;
4220
4221 /* There's a lot of code before this instruction. Start with an
4222 optimistic search; it's easy to recognize halfwords that can
4223 not be the start of a 32-bit instruction, and use that to
4224 lock on to the instruction boundaries. */
4225 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4226 if (buf == NULL)
4227 return bpaddr;
4228 buf_len = IT_SCAN_THRESHOLD;
4229
4230 definite = 0;
4231 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4232 {
4233 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4234 if (thumb_insn_size (inst1) == 2)
4235 {
4236 definite = 1;
4237 break;
4238 }
4239 }
4240
4241 /* At this point, if DEFINITE, BUF[I] is the first place we
4242 are sure that we know the instruction boundaries, and it is far
4243 enough from BPADDR that we could not miss an IT instruction
4244 affecting BPADDR. If ! DEFINITE, give up - start from a
4245 known boundary. */
4246 if (! definite)
4247 {
4248 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4249 bpaddr - boundary);
4250 if (buf == NULL)
4251 return bpaddr;
4252 buf_len = bpaddr - boundary;
4253 i = 0;
4254 }
4255 }
4256 else
4257 {
4258 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4259 if (buf == NULL)
4260 return bpaddr;
4261 buf_len = bpaddr - boundary;
4262 i = 0;
4263 }
4264
4265 /* Scan forwards. Find the last IT instruction before BPADDR. */
4266 last_it = -1;
4267 last_it_count = 0;
4268 while (i < buf_len)
4269 {
4270 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4271 last_it_count--;
4272 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4273 {
4274 last_it = i;
4275 if (inst1 & 0x0001)
4276 last_it_count = 4;
4277 else if (inst1 & 0x0002)
4278 last_it_count = 3;
4279 else if (inst1 & 0x0004)
4280 last_it_count = 2;
4281 else
4282 last_it_count = 1;
4283 }
4284 i += thumb_insn_size (inst1);
4285 }
4286
4287 xfree (buf);
4288
4289 if (last_it == -1)
4290 /* There wasn't really an IT instruction after all. */
4291 return bpaddr;
4292
4293 if (last_it_count < 1)
4294 /* It was too far away. */
4295 return bpaddr;
4296
4297 /* This really is a trouble spot. Move the breakpoint to the IT
4298 instruction. */
4299 return bpaddr - buf_len + last_it;
4300 }
4301
4302 /* ARM displaced stepping support.
4303
4304 Generally ARM displaced stepping works as follows:
4305
4306 1. When an instruction is to be single-stepped, it is first decoded by
4307 arm_process_displaced_insn. Depending on the type of instruction, it is
4308 then copied to a scratch location, possibly in a modified form. The
4309 copy_* set of functions performs such modification, as necessary. A
4310 breakpoint is placed after the modified instruction in the scratch space
4311 to return control to GDB. Note in particular that instructions which
4312 modify the PC will no longer do so after modification.
4313
4314 2. The instruction is single-stepped, by setting the PC to the scratch
4315 location address, and resuming. Control returns to GDB when the
4316 breakpoint is hit.
4317
4318 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4319 function used for the current instruction. This function's job is to
4320 put the CPU/memory state back to what it would have been if the
4321 instruction had been executed unmodified in its original location. */
4322
/* NOP instruction (mov r0, r0).  */
#define ARM_NOP 0xe1a00000
/* Thumb encoding used as a NOP here (0x4600 is mov r0, r0 in the
   hi-register MOV form).  */
#define THUMB_NOP 0x4600
4326
4327 /* Helper for register reads for displaced stepping. In particular, this
4328 returns the PC as it would be seen by the instruction at its original
4329 location. */
4330
4331 ULONGEST
4332 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4333 int regno)
4334 {
4335 ULONGEST ret;
4336 CORE_ADDR from = dsc->insn_addr;
4337
4338 if (regno == ARM_PC_REGNUM)
4339 {
4340 /* Compute pipeline offset:
4341 - When executing an ARM instruction, PC reads as the address of the
4342 current instruction plus 8.
4343 - When executing a Thumb instruction, PC reads as the address of the
4344 current instruction plus 4. */
4345
4346 if (!dsc->is_thumb)
4347 from += 8;
4348 else
4349 from += 4;
4350
4351 if (debug_displaced)
4352 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4353 (unsigned long) from);
4354 return (ULONGEST) from;
4355 }
4356 else
4357 {
4358 regcache_cooked_read_unsigned (regs, regno, &ret);
4359 if (debug_displaced)
4360 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4361 regno, (unsigned long) ret);
4362 return ret;
4363 }
4364 }
4365
4366 static int
4367 displaced_in_arm_mode (struct regcache *regs)
4368 {
4369 ULONGEST ps;
4370 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4371
4372 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4373
4374 return (ps & t_bit) == 0;
4375 }
4376
4377 /* Write to the PC as from a branch instruction. */
4378
4379 static void
4380 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4381 ULONGEST val)
4382 {
4383 if (!dsc->is_thumb)
4384 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4385 architecture versions < 6. */
4386 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4387 val & ~(ULONGEST) 0x3);
4388 else
4389 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4390 val & ~(ULONGEST) 0x1);
4391 }
4392
4393 /* Write to the PC as from a branch-exchange instruction. */
4394
4395 static void
4396 bx_write_pc (struct regcache *regs, ULONGEST val)
4397 {
4398 ULONGEST ps;
4399 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4400
4401 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4402
4403 if ((val & 1) == 1)
4404 {
4405 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4406 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4407 }
4408 else if ((val & 2) == 0)
4409 {
4410 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4411 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4412 }
4413 else
4414 {
4415 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4416 mode, align dest to 4 bytes). */
4417 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4418 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4419 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4420 }
4421 }
4422
4423 /* Write to the PC as if from a load instruction. */
4424
4425 static void
4426 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4427 ULONGEST val)
4428 {
4429 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4430 bx_write_pc (regs, val);
4431 else
4432 branch_write_pc (regs, dsc, val);
4433 }
4434
4435 /* Write to the PC as if from an ALU instruction. */
4436
4437 static void
4438 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4439 ULONGEST val)
4440 {
4441 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4442 bx_write_pc (regs, val);
4443 else
4444 branch_write_pc (regs, dsc, val);
4445 }
4446
4447 /* Helper for writing to registers for displaced stepping. Writing to the PC
4448 has a varying effects depending on the instruction which does the write:
4449 this is controlled by the WRITE_PC argument. */
4450
4451 void
4452 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4453 int regno, ULONGEST val, enum pc_write_style write_pc)
4454 {
4455 if (regno == ARM_PC_REGNUM)
4456 {
4457 if (debug_displaced)
4458 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4459 (unsigned long) val);
4460 switch (write_pc)
4461 {
4462 case BRANCH_WRITE_PC:
4463 branch_write_pc (regs, dsc, val);
4464 break;
4465
4466 case BX_WRITE_PC:
4467 bx_write_pc (regs, val);
4468 break;
4469
4470 case LOAD_WRITE_PC:
4471 load_write_pc (regs, dsc, val);
4472 break;
4473
4474 case ALU_WRITE_PC:
4475 alu_write_pc (regs, dsc, val);
4476 break;
4477
4478 case CANNOT_WRITE_PC:
4479 warning (_("Instruction wrote to PC in an unexpected way when "
4480 "single-stepping"));
4481 break;
4482
4483 default:
4484 internal_error (__FILE__, __LINE__,
4485 _("Invalid argument to displaced_write_reg"));
4486 }
4487
4488 dsc->wrote_to_pc = 1;
4489 }
4490 else
4491 {
4492 if (debug_displaced)
4493 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4494 regno, (unsigned long) val);
4495 regcache_cooked_write_unsigned (regs, regno, val);
4496 }
4497 }
4498
/* This function is used to concisely determine if an instruction INSN
   references PC.  Register fields of interest in INSN should have the
   corresponding fields of BITMASK set to 0b1111.  The function
   returns return 1 if any of these fields in INSN reference the PC
   (also 0b1111, r15), else it returns 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t remaining = bitmask;
  uint32_t probe = 1;

  while (remaining != 0)
    {
      /* Advance PROBE to the lowest bit still set in REMAINING; a
	 probe of zero means we have shifted off the top.  */
      while (probe != 0 && (remaining & probe) == 0)
	probe <<= 1;

      if (probe == 0)
	break;

      /* The register field is the nibble starting at PROBE.  */
      uint32_t field = probe * 0xf;

      if ((insn & field) == field)
	return 1;

      remaining &= ~field;
    }

  return 0;
}
4530
4531 /* The simplest copy function. Many instructions have the same effect no
4532 matter what address they are executed at: in those cases, use this. */
4533
4534 static int
4535 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4536 const char *iname, arm_displaced_step_closure *dsc)
4537 {
4538 if (debug_displaced)
4539 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4540 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4541 iname);
4542
4543 dsc->modinsn[0] = insn;
4544
4545 return 0;
4546 }
4547
4548 static int
4549 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4550 uint16_t insn2, const char *iname,
4551 arm_displaced_step_closure *dsc)
4552 {
4553 if (debug_displaced)
4554 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4555 "opcode/class '%s' unmodified\n", insn1, insn2,
4556 iname);
4557
4558 dsc->modinsn[0] = insn1;
4559 dsc->modinsn[1] = insn2;
4560 dsc->numinsns = 2;
4561
4562 return 0;
4563 }
4564
4565 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
4566 modification. */
4567 static int
4568 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4569 const char *iname,
4570 arm_displaced_step_closure *dsc)
4571 {
4572 if (debug_displaced)
4573 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4574 "opcode/class '%s' unmodified\n", insn,
4575 iname);
4576
4577 dsc->modinsn[0] = insn;
4578
4579 return 0;
4580 }
4581
4582 /* Preload instructions with immediate offset. */
4583
4584 static void
4585 cleanup_preload (struct gdbarch *gdbarch,
4586 struct regcache *regs, arm_displaced_step_closure *dsc)
4587 {
4588 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4589 if (!dsc->u.preload.immed)
4590 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4591 }
4592
4593 static void
4594 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4595 arm_displaced_step_closure *dsc, unsigned int rn)
4596 {
4597 ULONGEST rn_val;
4598 /* Preload instructions:
4599
4600 {pli/pld} [rn, #+/-imm]
4601 ->
4602 {pli/pld} [r0, #+/-imm]. */
4603
4604 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4605 rn_val = displaced_read_reg (regs, dsc, rn);
4606 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4607 dsc->u.preload.immed = 1;
4608
4609 dsc->cleanup = &cleanup_preload;
4610 }
4611
4612 static int
4613 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4614 arm_displaced_step_closure *dsc)
4615 {
4616 unsigned int rn = bits (insn, 16, 19);
4617
4618 if (!insn_references_pc (insn, 0x000f0000ul))
4619 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4620
4621 if (debug_displaced)
4622 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4623 (unsigned long) insn);
4624
4625 dsc->modinsn[0] = insn & 0xfff0ffff;
4626
4627 install_preload (gdbarch, regs, dsc, rn);
4628
4629 return 0;
4630 }
4631
/* Copy a 32-bit Thumb preload (PLD/PLI) whose base register is the PC,
   rewriting it to a register-offset form on scratch registers.  */
static int
thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
		     struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn1, 0, 3);	/* Base register field.  */
  unsigned int u_bit = bit (insn1, 7);	/* 1 = add offset, 0 = subtract.  */
  int imm12 = bits (insn2, 0, 11);	/* 12-bit immediate offset.  */
  ULONGEST pc_val;

  /* Non-PC-relative preloads run unmodified.  */
  if (rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);

  /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
     PLD (literal) Encoding T1.  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
			imm12);

  /* Fold the U bit into the sign of the offset.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction {pli/pld} PC imm12 into:
     Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12

     {pli/pld} [r0, r1]

     Cleanup: r0 <- tmp[0], r1 <- tmp[1].  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);

  /* The pipelined PC value the original instruction would have seen.  */
  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
  dsc->u.preload.immed = 0;

  /* {pli/pld} [r0, r1] */
  dsc->modinsn[0] = insn1 & 0xfff0;
  dsc->modinsn[1] = 0xf001;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_preload;
  return 0;
}
4679
4680 /* Preload instructions with register offset. */
4681
4682 static void
4683 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4684 arm_displaced_step_closure *dsc, unsigned int rn,
4685 unsigned int rm)
4686 {
4687 ULONGEST rn_val, rm_val;
4688
4689 /* Preload register-offset instructions:
4690
4691 {pli/pld} [rn, rm {, shift}]
4692 ->
4693 {pli/pld} [r0, r1 {, shift}]. */
4694
4695 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4696 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4697 rn_val = displaced_read_reg (regs, dsc, rn);
4698 rm_val = displaced_read_reg (regs, dsc, rm);
4699 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4700 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4701 dsc->u.preload.immed = 0;
4702
4703 dsc->cleanup = &cleanup_preload;
4704 }
4705
4706 static int
4707 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4708 struct regcache *regs,
4709 arm_displaced_step_closure *dsc)
4710 {
4711 unsigned int rn = bits (insn, 16, 19);
4712 unsigned int rm = bits (insn, 0, 3);
4713
4714
4715 if (!insn_references_pc (insn, 0x000f000ful))
4716 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4717
4718 if (debug_displaced)
4719 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4720 (unsigned long) insn);
4721
4722 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4723
4724 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4725 return 0;
4726 }
4727
4728 /* Copy/cleanup coprocessor load and store instructions. */
4729
4730 static void
4731 cleanup_copro_load_store (struct gdbarch *gdbarch,
4732 struct regcache *regs,
4733 arm_displaced_step_closure *dsc)
4734 {
4735 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4736
4737 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4738
4739 if (dsc->u.ldst.writeback)
4740 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4741 }
4742
4743 static void
4744 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4745 arm_displaced_step_closure *dsc,
4746 int writeback, unsigned int rn)
4747 {
4748 ULONGEST rn_val;
4749
4750 /* Coprocessor load/store instructions:
4751
4752 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4753 ->
4754 {stc/stc2} [r0, #+/-imm].
4755
4756 ldc/ldc2 are handled identically. */
4757
4758 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4759 rn_val = displaced_read_reg (regs, dsc, rn);
4760 /* PC should be 4-byte aligned. */
4761 rn_val = rn_val & 0xfffffffc;
4762 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4763
4764 dsc->u.ldst.writeback = writeback;
4765 dsc->u.ldst.rn = rn;
4766
4767 dsc->cleanup = &cleanup_copro_load_store;
4768 }
4769
4770 static int
4771 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4772 struct regcache *regs,
4773 arm_displaced_step_closure *dsc)
4774 {
4775 unsigned int rn = bits (insn, 16, 19);
4776
4777 if (!insn_references_pc (insn, 0x000f0000ul))
4778 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4779
4780 if (debug_displaced)
4781 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4782 "load/store insn %.8lx\n", (unsigned long) insn);
4783
4784 dsc->modinsn[0] = insn & 0xfff0ffff;
4785
4786 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4787
4788 return 0;
4789 }
4790
4791 static int
4792 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4793 uint16_t insn2, struct regcache *regs,
4794 arm_displaced_step_closure *dsc)
4795 {
4796 unsigned int rn = bits (insn1, 0, 3);
4797
4798 if (rn != ARM_PC_REGNUM)
4799 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4800 "copro load/store", dsc);
4801
4802 if (debug_displaced)
4803 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4804 "load/store insn %.4x%.4x\n", insn1, insn2);
4805
4806 dsc->modinsn[0] = insn1 & 0xfff0;
4807 dsc->modinsn[1] = insn2;
4808 dsc->numinsns = 2;
4809
4810 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4811 doesn't support writeback, so pass 0. */
4812 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4813
4814 return 0;
4815 }
4816
4817 /* Clean up branch instructions (actually perform the branch, by setting
4818 PC). */
4819
4820 static void
4821 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4822 arm_displaced_step_closure *dsc)
4823 {
4824 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4825 int branch_taken = condition_true (dsc->u.branch.cond, status);
4826 enum pc_write_style write_pc = dsc->u.branch.exchange
4827 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4828
4829 if (!branch_taken)
4830 return;
4831
4832 if (dsc->u.branch.link)
4833 {
4834 /* The value of LR should be the next insn of current one. In order
4835 not to confuse logic hanlding later insn `bx lr', if current insn mode
4836 is Thumb, the bit 0 of LR value should be set to 1. */
4837 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4838
4839 if (dsc->is_thumb)
4840 next_insn_addr |= 0x1;
4841
4842 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4843 CANNOT_WRITE_PC);
4844 }
4845
4846 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4847 }
4848
4849 /* Copy B/BL/BLX instructions with immediate destinations. */
4850
4851 static void
4852 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4853 arm_displaced_step_closure *dsc,
4854 unsigned int cond, int exchange, int link, long offset)
4855 {
4856 /* Implement "BL<cond> <label>" as:
4857
4858 Preparation: cond <- instruction condition
4859 Insn: mov r0, r0 (nop)
4860 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4861
4862 B<cond> similar, but don't set r14 in cleanup. */
4863
4864 dsc->u.branch.cond = cond;
4865 dsc->u.branch.link = link;
4866 dsc->u.branch.exchange = exchange;
4867
4868 dsc->u.branch.dest = dsc->insn_addr;
4869 if (link && exchange)
4870 /* For BLX, offset is computed from the Align (PC, 4). */
4871 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4872
4873 if (dsc->is_thumb)
4874 dsc->u.branch.dest += 4 + offset;
4875 else
4876 dsc->u.branch.dest += 8 + offset;
4877
4878 dsc->cleanup = &cleanup_branch;
4879 }
4880 static int
4881 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4882 struct regcache *regs, arm_displaced_step_closure *dsc)
4883 {
4884 unsigned int cond = bits (insn, 28, 31);
4885 int exchange = (cond == 0xf);
4886 int link = exchange || bit (insn, 24);
4887 long offset;
4888
4889 if (debug_displaced)
4890 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4891 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4892 (unsigned long) insn);
4893 if (exchange)
4894 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4895 then arrange the switch into Thumb mode. */
4896 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4897 else
4898 offset = bits (insn, 0, 23) << 2;
4899
4900 if (bit (offset, 25))
4901 offset = offset | ~0x3ffffff;
4902
4903 dsc->modinsn[0] = ARM_NOP;
4904
4905 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4906 return 0;
4907 }
4908
/* Copy a 32-bit Thumb B/BL/BLX with an immediate destination,
   reconstructing the branch offset and condition for the cleanup
   routine to apply.  */
static int
thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
		      uint16_t insn2, struct regcache *regs,
		      arm_displaced_step_closure *dsc)
{
  int link = bit (insn2, 14);		/* Set for BL/BLX.  */
  int exchange = link && !bit (insn2, 12);	/* BLX: link without bit 12.  */
  int cond = INST_AL;
  long offset = 0;
  int j1 = bit (insn2, 13);
  int j2 = bit (insn2, 11);
  /* S is sign-extended, so it is 0 or -1.  NOTE(review): when S is -1,
     (s << 24) left-shifts a negative value, which is formally undefined
     behaviour in C/C++; the code relies on the usual two's-complement
     result.  */
  int s = sbits (insn1, 10, 10);
  int i1 = !(j1 ^ bit (insn1, 10));
  int i2 = !(j2 ^ bit (insn1, 10));

  if (!link && !exchange) /* B */
    {
      offset = (bits (insn2, 0, 10) << 1);
      if (bit (insn2, 12)) /* Encoding T4 */
	{
	  offset |= (bits (insn1, 0, 9) << 12)
	    | (i2 << 22)
	    | (i1 << 23)
	    | (s << 24);
	  cond = INST_AL;
	}
      else /* Encoding T3 */
	{
	  offset |= (bits (insn1, 0, 5) << 12)
	    | (j1 << 18)
	    | (j2 << 19)
	    | (s << 20);
	  cond = bits (insn1, 6, 9);
	}
    }
  else
    {
      /* BL/BLX: assemble the offset from the first halfword's 10-bit
	 immediate and the I1/I2/S bits; BLX drops bit 0 of the second
	 halfword's immediate (word-aligned destination).  */
      offset = (bits (insn1, 0, 9) << 12);
      offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
      offset |= exchange ?
	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
			"%.4x %.4x with offset %.8lx\n",
			link ? (exchange) ? "blx" : "bl" : "b",
			insn1, insn2, offset);

  /* Execute a NOP in the scratch space; the branch itself happens in
     the cleanup routine.  */
  dsc->modinsn[0] = THUMB_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
4963
4964 /* Copy B Thumb instructions. */
4965 static int
4966 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
4967 arm_displaced_step_closure *dsc)
4968 {
4969 unsigned int cond = 0;
4970 int offset = 0;
4971 unsigned short bit_12_15 = bits (insn, 12, 15);
4972 CORE_ADDR from = dsc->insn_addr;
4973
4974 if (bit_12_15 == 0xd)
4975 {
4976 /* offset = SignExtend (imm8:0, 32) */
4977 offset = sbits ((insn << 1), 0, 8);
4978 cond = bits (insn, 8, 11);
4979 }
4980 else if (bit_12_15 == 0xe) /* Encoding T2 */
4981 {
4982 offset = sbits ((insn << 1), 0, 11);
4983 cond = INST_AL;
4984 }
4985
4986 if (debug_displaced)
4987 fprintf_unfiltered (gdb_stdlog,
4988 "displaced: copying b immediate insn %.4x "
4989 "with offset %d\n", insn, offset);
4990
4991 dsc->u.branch.cond = cond;
4992 dsc->u.branch.link = 0;
4993 dsc->u.branch.exchange = 0;
4994 dsc->u.branch.dest = from + 4 + offset;
4995
4996 dsc->modinsn[0] = THUMB_NOP;
4997
4998 dsc->cleanup = &cleanup_branch;
4999
5000 return 0;
5001 }
5002
5003 /* Copy BX/BLX with register-specified destinations. */
5004
5005 static void
5006 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5007 arm_displaced_step_closure *dsc, int link,
5008 unsigned int cond, unsigned int rm)
5009 {
5010 /* Implement {BX,BLX}<cond> <reg>" as:
5011
5012 Preparation: cond <- instruction condition
5013 Insn: mov r0, r0 (nop)
5014 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5015
5016 Don't set r14 in cleanup for BX. */
5017
5018 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5019
5020 dsc->u.branch.cond = cond;
5021 dsc->u.branch.link = link;
5022
5023 dsc->u.branch.exchange = 1;
5024
5025 dsc->cleanup = &cleanup_branch;
5026 }
5027
5028 static int
5029 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5030 struct regcache *regs, arm_displaced_step_closure *dsc)
5031 {
5032 unsigned int cond = bits (insn, 28, 31);
5033 /* BX: x12xxx1x
5034 BLX: x12xxx3x. */
5035 int link = bit (insn, 5);
5036 unsigned int rm = bits (insn, 0, 3);
5037
5038 if (debug_displaced)
5039 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5040 (unsigned long) insn);
5041
5042 dsc->modinsn[0] = ARM_NOP;
5043
5044 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5045 return 0;
5046 }
5047
5048 static int
5049 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5050 struct regcache *regs,
5051 arm_displaced_step_closure *dsc)
5052 {
5053 int link = bit (insn, 7);
5054 unsigned int rm = bits (insn, 3, 6);
5055
5056 if (debug_displaced)
5057 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5058 (unsigned short) insn);
5059
5060 dsc->modinsn[0] = THUMB_NOP;
5061
5062 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5063
5064 return 0;
5065 }
5066
5067
5068 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5069
5070 static void
5071 cleanup_alu_imm (struct gdbarch *gdbarch,
5072 struct regcache *regs, arm_displaced_step_closure *dsc)
5073 {
5074 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5075 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5076 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5077 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5078 }
5079
5080 static int
5081 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5082 arm_displaced_step_closure *dsc)
5083 {
5084 unsigned int rn = bits (insn, 16, 19);
5085 unsigned int rd = bits (insn, 12, 15);
5086 unsigned int op = bits (insn, 21, 24);
5087 int is_mov = (op == 0xd);
5088 ULONGEST rd_val, rn_val;
5089
5090 if (!insn_references_pc (insn, 0x000ff000ul))
5091 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5092
5093 if (debug_displaced)
5094 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5095 "%.8lx\n", is_mov ? "move" : "ALU",
5096 (unsigned long) insn);
5097
5098 /* Instruction is of form:
5099
5100 <op><cond> rd, [rn,] #imm
5101
5102 Rewrite as:
5103
5104 Preparation: tmp1, tmp2 <- r0, r1;
5105 r0, r1 <- rd, rn
5106 Insn: <op><cond> r0, r1, #imm
5107 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5108 */
5109
5110 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5111 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5112 rn_val = displaced_read_reg (regs, dsc, rn);
5113 rd_val = displaced_read_reg (regs, dsc, rd);
5114 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5115 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5116 dsc->rd = rd;
5117
5118 if (is_mov)
5119 dsc->modinsn[0] = insn & 0xfff00fff;
5120 else
5121 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5122
5123 dsc->cleanup = &cleanup_alu_imm;
5124
5125 return 0;
5126 }
5127
/* Copy a 32-bit Thumb ALU-immediate instruction.  Only MOV reaches
   this routine, and it is rewritten only when Rm or Rd is the PC.  */
static int
thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
		     uint16_t insn2, struct regcache *regs,
		     arm_displaced_step_closure *dsc)
{
  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn, rm, rd;
  ULONGEST rd_val, rn_val;

  rn = bits (insn1, 0, 3);	/* Rn */
  rm = bits (insn2, 0, 3);	/* Rm */
  rd = bits (insn2, 8, 11);	/* Rd */

  /* This routine is only called for instruction MOV.  */
  gdb_assert (op == 0x2 && rn == 0xf);

  /* No PC involved: copy unmodified.  */
  if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
			"ALU", insn1, insn2);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Clear the Rd (bits 8-11) and Rm (bits 0-3) fields of the second
     halfword, making Rd = r0 and Rm = r1.  */
  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
5179
5180 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5181
5182 static void
5183 cleanup_alu_reg (struct gdbarch *gdbarch,
5184 struct regcache *regs, arm_displaced_step_closure *dsc)
5185 {
5186 ULONGEST rd_val;
5187 int i;
5188
5189 rd_val = displaced_read_reg (regs, dsc, 0);
5190
5191 for (i = 0; i < 3; i++)
5192 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5193
5194 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5195 }
5196
5197 static void
5198 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5199 arm_displaced_step_closure *dsc,
5200 unsigned int rd, unsigned int rn, unsigned int rm)
5201 {
5202 ULONGEST rd_val, rn_val, rm_val;
5203
5204 /* Instruction is of form:
5205
5206 <op><cond> rd, [rn,] rm [, <shift>]
5207
5208 Rewrite as:
5209
5210 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5211 r0, r1, r2 <- rd, rn, rm
5212 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5213 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5214 */
5215
5216 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5217 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5218 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5219 rd_val = displaced_read_reg (regs, dsc, rd);
5220 rn_val = displaced_read_reg (regs, dsc, rn);
5221 rm_val = displaced_read_reg (regs, dsc, rm);
5222 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5223 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5224 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5225 dsc->rd = rd;
5226
5227 dsc->cleanup = &cleanup_alu_reg;
5228 }
5229
/* Copy an ARM data-processing instruction whose second operand is a
   register (possibly shifted by a constant).  Only instructions that
   reference the PC in Rd, Rn or Rm need rewriting.  Always returns 0.  */

static int
arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
		  arm_displaced_step_closure *dsc)
{
  unsigned int op = bits (insn, 21, 24);
  int is_mov = (op == 0xd);	/* MOV takes no first (Rn) operand.  */

  /* Mask 0x000ff00f covers the Rd, Rn and Rm fields.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
			is_mov ? "move" : "ALU", (unsigned long) insn);

  /* Redirect the register fields at the scratch registers: Rd -> r0,
     Rm -> r2, and (except for MOV, whose Rn field stays clear)
     Rn -> r1.  */
  if (is_mov)
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
  else
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;

  install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
		   bits (insn, 0, 3));
  return 0;
}
5253
5254 static int
5255 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5256 struct regcache *regs,
5257 arm_displaced_step_closure *dsc)
5258 {
5259 unsigned rm, rd;
5260
5261 rm = bits (insn, 3, 6);
5262 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5263
5264 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5265 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5266
5267 if (debug_displaced)
5268 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5269 (unsigned short) insn);
5270
5271 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5272
5273 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5274
5275 return 0;
5276 }
5277
5278 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5279
5280 static void
5281 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5282 struct regcache *regs,
5283 arm_displaced_step_closure *dsc)
5284 {
5285 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5286 int i;
5287
5288 for (i = 0; i < 4; i++)
5289 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5290
5291 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5292 }
5293
5294 static void
5295 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5296 arm_displaced_step_closure *dsc,
5297 unsigned int rd, unsigned int rn, unsigned int rm,
5298 unsigned rs)
5299 {
5300 int i;
5301 ULONGEST rd_val, rn_val, rm_val, rs_val;
5302
5303 /* Instruction is of form:
5304
5305 <op><cond> rd, [rn,] rm, <shift> rs
5306
5307 Rewrite as:
5308
5309 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5310 r0, r1, r2, r3 <- rd, rn, rm, rs
5311 Insn: <op><cond> r0, r1, r2, <shift> r3
5312 Cleanup: tmp5 <- r0
5313 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5314 rd <- tmp5
5315 */
5316
5317 for (i = 0; i < 4; i++)
5318 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5319
5320 rd_val = displaced_read_reg (regs, dsc, rd);
5321 rn_val = displaced_read_reg (regs, dsc, rn);
5322 rm_val = displaced_read_reg (regs, dsc, rm);
5323 rs_val = displaced_read_reg (regs, dsc, rs);
5324 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5325 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5326 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5327 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5328 dsc->rd = rd;
5329 dsc->cleanup = &cleanup_alu_shifted_reg;
5330 }
5331
/* Copy an ARM data-processing instruction whose second operand is a
   register shifted by a register.  Only instructions that reference the
   PC in Rd, Rn, Rm or Rs need rewriting.  Always returns 0.  */

static int
arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  unsigned int op = bits (insn, 21, 24);
  int is_mov = (op == 0xd);	/* MOV takes no first (Rn) operand.  */
  unsigned int rd, rn, rm, rs;

  /* Mask 0x000fff0f covers the Rd, Rn, Rs and Rm fields.  */
  if (!insn_references_pc (insn, 0x000fff0ful))
    return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
			"%.8lx\n", is_mov ? "move" : "ALU",
			(unsigned long) insn);

  rn = bits (insn, 16, 19);
  rm = bits (insn, 0, 3);
  rs = bits (insn, 8, 11);
  rd = bits (insn, 12, 15);

  /* Redirect the register fields at the scratch registers: Rd -> r0,
     Rs -> r3, Rm -> r2, and (except for MOV) Rn -> r1.  */
  if (is_mov)
    dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
  else
    dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;

  install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);

  return 0;
}
5363
/* Clean up load instructions.  Retrieves the loaded value(s) from the
   scratch registers, restores the scratch registers, performs any base
   register writeback, and finally writes the result to the real
   destination (possibly the PC).  */

static void
cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
	      arm_displaced_step_closure *dsc)
{
  ULONGEST rt_val, rt_val2 = 0, rn_val;

  /* r0 (and r1 for doubleword transfers) hold the loaded data; r2 holds
     the possibly-updated base register value.  */
  rt_val = displaced_read_reg (regs, dsc, 0);
  if (dsc->u.ldst.xfersize == 8)
    rt_val2 = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, 2);

  /* Restore the scratch registers.  r3 was only used for the register
     offset of non-immediate addressing forms.  */
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);

  /* Handle register writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
  /* Put result in right place.  A load into the PC takes effect here,
     with LOAD_WRITE_PC semantics.  */
  displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
  if (dsc->u.ldst.xfersize == 8)
    displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
}
5392
/* Clean up store instructions.  Restores the scratch registers used by
   the out-of-line copy and performs base register writeback.  */

static void
cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
	       arm_displaced_step_closure *dsc)
{
  /* r2 holds the (possibly updated) base register value.  */
  ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);

  /* Restore the scratch registers.  r1 is only clobbered by transfers
     wider than a word, and r3 only by register-offset forms.  */
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
  /* NOTE(review): this polarity looks suspicious.  r4 is clobbered by
     the modified sequence precisely when restore_r4 is set (see
     arm_copy_ldr_str_ldrb_strb), yet it is restored only when the flag
     is clear — and install_load_store saves tmp[4] for every store, so
     the write below is a no-op when taken.  Verify against upstream
     history before changing.  */
  if (!dsc->u.ldst.restore_r4)
    displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);

  /* Writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
}
5414
5415 /* Copy "extra" load/store instructions. These are halfword/doubleword
5416 transfers, which have a different encoding to byte/word transfers. */
5417
5418 static int
5419 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5420 struct regcache *regs, arm_displaced_step_closure *dsc)
5421 {
5422 unsigned int op1 = bits (insn, 20, 24);
5423 unsigned int op2 = bits (insn, 5, 6);
5424 unsigned int rt = bits (insn, 12, 15);
5425 unsigned int rn = bits (insn, 16, 19);
5426 unsigned int rm = bits (insn, 0, 3);
5427 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5428 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5429 int immed = (op1 & 0x4) != 0;
5430 int opcode;
5431 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5432
5433 if (!insn_references_pc (insn, 0x000ff00ful))
5434 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5435
5436 if (debug_displaced)
5437 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5438 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5439 (unsigned long) insn);
5440
5441 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5442
5443 if (opcode < 0)
5444 internal_error (__FILE__, __LINE__,
5445 _("copy_extra_ld_st: instruction decode error"));
5446
5447 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5448 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5449 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5450 if (!immed)
5451 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5452
5453 rt_val = displaced_read_reg (regs, dsc, rt);
5454 if (bytesize[opcode] == 8)
5455 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5456 rn_val = displaced_read_reg (regs, dsc, rn);
5457 if (!immed)
5458 rm_val = displaced_read_reg (regs, dsc, rm);
5459
5460 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5461 if (bytesize[opcode] == 8)
5462 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5463 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5464 if (!immed)
5465 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5466
5467 dsc->rd = rt;
5468 dsc->u.ldst.xfersize = bytesize[opcode];
5469 dsc->u.ldst.rn = rn;
5470 dsc->u.ldst.immed = immed;
5471 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5472 dsc->u.ldst.restore_r4 = 0;
5473
5474 if (immed)
5475 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5476 ->
5477 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5478 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5479 else
5480 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5481 ->
5482 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5483 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5484
5485 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5486
5487 return 0;
5488 }
5489
/* Copy byte/half word/word loads and stores.  Common set-up for single
   load/store instructions: saves scratch registers, parks the operand
   values in them, records the transfer parameters in DSC, and selects
   the matching cleanup routine.  NOTE(review): USERMODE is currently
   unused here, and tmp[1] is never saved even though the cleanup
   routines restore it when xfersize > 4 — presumably this path is only
   used for transfers of at most a word; confirm at the call sites.  */

static void
install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
		    arm_displaced_step_closure *dsc, int load,
		    int immed, int writeback, int size, int usermode,
		    int rt, int rm, int rn)
{
  ULONGEST rt_val, rn_val, rm_val = 0;

  /* Save scratch registers.  r3 is only needed for register-offset
     forms; r4 only as extra scratch for PC-stores (see the comment
     below).  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  if (!load)
    dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);

  rt_val = displaced_read_reg (regs, dsc, rt);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  /* Park the operand values in r0/r2/r3 for the out-of-line copy.  */
  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rt;
  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = writeback;

  /* To write PC we can do:

     Before this sequence of instructions:
     r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
     r2 is the Rn value got from dispalced_read_reg.

     Insn1: push {pc} Write address of STR instruction + offset on stack
     Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
     Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
				= addr(Insn1) + offset - addr(Insn3) - 8
				= offset - 16
     Insn4: add r4, r4, #8 r4 = offset - 8
     Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
				= from + offset
     Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])

     Otherwise we don't know what value to write for PC, since the offset is
     architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
     of this can be found in Section "Saving from r15" in
     http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;
}
5545
5546
/* Copy a Thumb-2 PC-relative load (LDR literal) for displaced stepping.
   SIZE is the transfer width in bytes.  The PC-relative address is
   materialized explicitly (r2 <- word-aligned PC, r3 <- signed offset)
   and the load is replayed as a register-offset LDR.  */

static int
thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_closure *dsc, int size)
{
  unsigned int u_bit = bit (insn1, 7);	/* 1: add offset, 0: subtract.  */
  unsigned int rt = bits (insn2, 12, 15);
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
			imm12);

  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction LDR Rt imm12 into:

     Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12

     LDR R0, R2, R3,

     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3].  */


  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  /* Literal loads use the word-aligned PC as base.  */
  pc_val = pc_val & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);

  dsc->rd = rt;

  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  /* LDR R0, R2, R3 */
  dsc->modinsn[0] = 0xf852;
  dsc->modinsn[1] = 0x3;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_load;

  return 0;
}
5602
/* Copy a Thumb-2 load with immediate or register offset.  Loads that do
   not involve the PC in Rt or Rn run unmodified.  */

static int
thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_closure *dsc,
			  int writeback, int immed)
{
  unsigned int rt = bits (insn2, 12, 15);
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */
  /* In LDR (register), there is also a register Rm, which is not allowed to
     be PC, so we don't have to check it.  */

  if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
					dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
			rt, rn, insn1, insn2);

  /* Always a (word-sized) load, never unprivileged.  */
  install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
		      0, rt, rm, rn);

  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* ldr[b]<cond> rt, [rn, #imm], etc.
       ->
       ldr[b]<cond> r0, [r2, #imm].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = insn2 & 0x0fff;
    }
  else
    /* ldr[b]<cond> rt, [rn, rm], etc.
       ->
       ldr[b]<cond> r0, [r2, r3].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
    }

  dsc->numinsns = 2;

  return 0;
}
5650
5651
/* Copy an ARM single load/store (LDR/STR/LDRB/STRB and their "t"
   variants).  LOAD selects load vs store, SIZE is the transfer width in
   bytes, USERMODE marks the unprivileged forms.  Storing the PC needs a
   special multi-instruction sequence (see install_load_store's comment)
   because the stored PC offset is architecture-dependent.  */

static int
arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
			    struct regcache *regs,
			    arm_displaced_step_closure *dsc,
			    int load, int size, int usermode)
{
  int immed = !bit (insn, 25);
  int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */

  /* Mask 0x000ff00f covers the Rt, Rn and Rm fields.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying %s%s r%d [r%d] insn %.8lx\n",
			load ? (size == 1 ? "ldrb" : "ldr")
			     : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
			rt, rn,
			(unsigned long) insn);

  install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
		      usermode, rt, rm, rn);

  if (load || rt != ARM_PC_REGNUM)
    {
      dsc->u.ldst.restore_r4 = 0;

      if (immed)
	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
	dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
      else
	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, r3].  */
	dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
    }
  else
    {
      /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
      dsc->u.ldst.restore_r4 = 1;
      /* Compute the value the original instruction would have stored
	 for the PC, then store it (see install_load_store's comment for
	 the arithmetic).  */
      dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
      dsc->modinsn[1] = 0xe8bd0010;  /* pop  {r4} */
      dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
      dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
      dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */

      /* As above.  */
      if (immed)
	dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
      else
	dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;

      dsc->numinsns = 6;
    }

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;

  return 0;
}
5716
5717 /* Cleanup LDM instructions with fully-populated register list. This is an
5718 unfortunate corner case: it's impossible to implement correctly by modifying
5719 the instruction. The issue is as follows: we have an instruction,
5720
5721 ldm rN, {r0-r15}
5722
5723 which we must rewrite to avoid loading PC. A possible solution would be to
5724 do the load in two halves, something like (with suitable cleanup
5725 afterwards):
5726
5727 mov r8, rN
5728 ldm[id][ab] r8!, {r0-r7}
5729 str r7, <temp>
5730 ldm[id][ab] r8, {r7-r14}
5731 <bkpt>
5732
5733 but at present there's no suitable place for <temp>, since the scratch space
5734 is overwritten before the cleanup routine is called. For now, we simply
5735 emulate the instruction. */
5736
/* Cleanup for an LDM with a fully-populated register list: emulate the
   whole transfer in GDB rather than executing a modified copy (see the
   comment above for why).  The out-of-line instruction was a NOP, so
   nothing needs undoing.  */

static void
cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
			arm_displaced_step_closure *dsc)
{
  int inc = dsc->u.block.increment;
  /* Address adjustments for before/after and increment/decrement
     addressing modes.  */
  int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
  int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
  uint32_t regmask = dsc->u.block.regmask;
  /* Incrementing transfers walk up from r0; decrementing walk down from
     r15.  */
  int regno = inc ? 0 : 15;
  CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
  int exception_return = dsc->u.block.load && dsc->u.block.user
			 && (regmask & 0x8000) != 0;
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int do_transfer = condition_true (dsc->u.block.cond, status);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* Emulate the condition code: a failed condition means no transfer.  */
  if (!do_transfer)
    return;

  /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
     sensible we can do here.  Complain loudly.  */
  if (exception_return)
    error (_("Cannot single-step exception return"));

  /* We don't handle any stores here for now.  */
  gdb_assert (dsc->u.block.load != 0);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
			"%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
			dsc->u.block.increment ? "inc" : "dec",
			dsc->u.block.before ? "before" : "after");

  /* Load each register named in the mask, in transfer order.  */
  while (regmask)
    {
      uint32_t memword;

      /* Advance to the next register in the mask.  */
      if (inc)
	while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
	  regno++;
      else
	while (regno >= 0 && (regmask & (1 << regno)) == 0)
	  regno--;

      xfer_addr += bump_before;

      memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
      displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);

      xfer_addr += bump_after;

      regmask &= ~(1 << regno);
    }

  /* Emulate base-register writeback.  */
  if (dsc->u.block.writeback)
    displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
			 CANNOT_WRITE_PC);
}
5795
/* Clean up an STM which included the PC in the register list.  The copy
   executed as-is in the scratch area, so the stored "PC" slot contains
   an address relative to the scratch buffer; this routine detects the
   architecture-dependent offset it used and patches the memory word to
   the value the original instruction would have stored.  */

static void
cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
			arm_displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int store_executed = condition_true (dsc->u.block.cond, status);
  CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
  CORE_ADDR stm_insn_addr;
  uint32_t pc_val;
  long offset;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* If condition code fails, there's nothing else to do.  */
  if (!store_executed)
    return;

  /* Locate where the PC (always the highest-numbered register, hence
     last in transfer order) was stored, for each addressing mode.  */
  if (dsc->u.block.increment)
    {
      pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;

      if (dsc->u.block.before)
	pc_stored_at += 4;
    }
  else
    {
      pc_stored_at = dsc->u.block.xfer_addr;

      if (dsc->u.block.before)
	pc_stored_at -= 4;
    }

  /* The stored value minus the scratch-copy address gives the
     architecture's store-PC offset (e.g. +8 or +12).  */
  pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
  stm_insn_addr = dsc->scratch_base;
  offset = pc_val - stm_insn_addr;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
			"STM instruction\n", offset);

  /* Rewrite the stored PC to the proper value for the non-displaced original
     instruction.  */
  write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
				 dsc->insn_addr + offset);
}
5842
/* Clean up an LDM which includes the PC in the register list.  We clumped all
   the registers in the transferred list into a contiguous range r0...rX (to
   avoid loading PC directly and losing control of the debugged program), so we
   must undo that here.  */

static void
cleanup_block_load_pc (struct gdbarch *gdbarch,
		       struct regcache *regs,
		       arm_displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int load_executed = condition_true (dsc->u.block.cond, status);
  unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
  unsigned int regs_loaded = bitcount (mask);
  unsigned int num_to_shuffle = regs_loaded, clobbered;

  /* The method employed here will fail if the register list is fully populated
     (we need to avoid loading PC directly).  */
  gdb_assert (num_to_shuffle < 16);

  /* If the condition failed, the copy was a no-op; nothing to undo.  */
  if (!load_executed)
    return;

  /* Registers r0..r(N-1) hold the loaded values and may need restoring.  */
  clobbered = (1 << num_to_shuffle) - 1;

  /* Walk downwards from the PC, moving each loaded value from its
     temporary slot r(num_to_shuffle-1), r(num_to_shuffle-2), ... into
     the register the original instruction named.  */
  while (num_to_shuffle > 0)
    {
      if ((mask & (1 << write_reg)) != 0)
	{
	  unsigned int read_reg = num_to_shuffle - 1;

	  if (read_reg != write_reg)
	    {
	      ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
	      displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
	      if (debug_displaced)
		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
				    "loaded register r%d to r%d\n"), read_reg,
				    write_reg);
	    }
	  else if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
				"r%d already in the right place\n"),
				write_reg);

	  clobbered &= ~(1 << write_reg);

	  num_to_shuffle--;
	}

      write_reg--;
    }

  /* Restore any registers we scribbled over.  */
  for (write_reg = 0; clobbered != 0; write_reg++)
    {
      if ((clobbered & (1 << write_reg)) != 0)
	{
	  displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
			       CANNOT_WRITE_PC);
	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
				"clobbered register r%d\n"), write_reg);
	  clobbered &= ~(1 << write_reg);
	}
    }

  /* Perform register writeback manually.  The writeback bit was cleared
     in the copied instruction (see arm_copy_block_xfer).  */
  if (dsc->u.block.writeback)
    {
      ULONGEST new_rn_val = dsc->u.block.xfer_addr;

      if (dsc->u.block.increment)
	new_rn_val += regs_loaded * 4;
      else
	new_rn_val -= regs_loaded * 4;

      displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
			   CANNOT_WRITE_PC);
    }
}
5924
/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
   in user-level code (in particular exception return, ldm rn, {...pc}^).
   Always returns 0.  */

static int
arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
		     struct regcache *regs,
		     arm_displaced_step_closure *dsc)
{
  int load = bit (insn, 20);
  int user = bit (insn, 22);
  int increment = bit (insn, 23);
  int before = bit (insn, 24);
  int writeback = bit (insn, 21);
  int rn = bits (insn, 16, 19);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
    return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);

  /* Using the PC as the base register is architecturally unpredictable;
     warn but execute the instruction as-is.  */
  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.8lx\n", (unsigned long) insn);

  /* Record the transfer parameters for the cleanup routines.  */
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = user;
  dsc->u.block.increment = increment;
  dsc->u.block.before = before;
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = bits (insn, 28, 31);

  dsc->u.block.regmask = insn & 0xffff;

  if (load)
    {
      if ((insn & 0xffff) == 0xffff)
	{
	  /* LDM with a fully-populated register list.  This case is
	     particularly tricky.  Implement for now by fully emulating the
	     instruction (which might not behave perfectly in all cases, but
	     these instructions should be rare enough for that not to matter
	     too much).  */
	  dsc->modinsn[0] = ARM_NOP;

	  dsc->cleanup = &cleanup_block_load_all;
	}
      else
	{
	  /* LDM of a list of registers which includes PC.  Implement by
	     rewriting the list of registers to be transferred into a
	     contiguous chunk r0...rX before doing the transfer, then shuffling
	     registers into the correct places in the cleanup routine.  */
	  unsigned int regmask = insn & 0xffff;
	  unsigned int num_in_list = bitcount (regmask), new_regmask;
	  unsigned int i;

	  /* Save r0..r(N-1), which the rewritten transfer will clobber.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Writeback makes things complicated.  We need to avoid clobbering
	     the base register with one of the registers in our modified
	     register list, but just using a different register can't work in
	     all cases, e.g.:

	     ldm r14!, {r0-r13,pc}

	     which would need to be rewritten as:

	     ldm rN!, {r0-r14}

	     but that can't work, because there's no free register for N.

	     Solve this by turning off the writeback bit, and emulating
	     writeback manually in the cleanup routine.  */

	  if (writeback)
	    insn &= ~(1 << 21);

	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) insn & 0xffff, new_regmask);

	  dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM of a list of registers which includes PC.  Run the instruction
	 as-is, but out of line: this will store the wrong value for the PC,
	 so we must manually fix up the memory in the cleanup routine.
	 Doing things this way has the advantage that we can auto-detect
	 the offset of the PC write (which is architecture-dependent) in
	 the cleanup routine.  */
      dsc->modinsn[0] = insn;

      dsc->cleanup = &cleanup_block_store_pc;
    }

  return 0;
}
6041
/* Thumb-2 counterpart of arm_copy_block_xfer: handle 32-bit LDM/STM for
   displaced stepping.  Uses the same cleanup routines as the ARM
   version.  Always returns 0.  */

static int
thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
			struct regcache *regs,
			arm_displaced_step_closure *dsc)
{
  int rn = bits (insn1, 0, 3);
  int load = bit (insn1, 4);
  int writeback = bit (insn1, 5);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);

  /* PC as base register is architecturally unpredictable; warn and run
     the instruction as-is.  */
  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.4x%.4x\n", insn1, insn2);

  /* Clear bit 13, since it should be always zero.  */
  dsc->u.block.regmask = (insn2 & 0xdfff);
  dsc->u.block.rn = rn;

  /* Record the transfer parameters for the cleanup routines.  Thumb-2
     LDM/STM is unconditional (IT aside) and has no user-mode form.  */
  dsc->u.block.load = load;
  dsc->u.block.user = 0;
  dsc->u.block.increment = bit (insn1, 7);
  dsc->u.block.before = bit (insn1, 8);
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = INST_AL;
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);

  if (load)
    {
      if (dsc->u.block.regmask == 0xffff)
	{
	  /* This branch is impossible to happen.  */
	  gdb_assert (0);
	}
      else
	{
	  /* As in arm_copy_block_xfer: rewrite the register list into a
	     contiguous chunk r0...rX, clear the writeback bit (writeback
	     is emulated in the cleanup), and shuffle afterwards.  */
	  unsigned int regmask = dsc->u.block.regmask;
	  unsigned int num_in_list = bitcount (regmask), new_regmask;
	  unsigned int i;

	  /* Save r0..r(N-1), which the rewritten transfer will clobber.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  if (writeback)
	    insn1 &= ~(1 << 5);

	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) dsc->u.block.regmask, new_regmask);

	  dsc->modinsn[0] = insn1;
	  dsc->modinsn[1] = (new_regmask & 0xffff);
	  dsc->numinsns = 2;

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM including the PC: run as-is and patch the stored PC value in
	 the cleanup routine.  */
      dsc->modinsn[0] = insn1;
      dsc->modinsn[1] = insn2;
      dsc->numinsns = 2;
      dsc->cleanup = &cleanup_block_store_pc;
    }
  return 0;
}
6123
6124 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6125 This is used to avoid a dependency on BFD's bfd_endian enum. */
6126
6127 ULONGEST
6128 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6129 int byte_order)
6130 {
6131 return read_memory_unsigned_integer (memaddr, len,
6132 (enum bfd_endian) byte_order);
6133 }
6134
6135 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6136
6137 CORE_ADDR
6138 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6139 CORE_ADDR val)
6140 {
6141 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6142 }
6143
6144 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6145
static CORE_ADDR
arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
{
  /* Generic ARM has no syscall-specific next-PC computation; OS-specific
     code overrides this hook.  NOTE(review): presumably a return of 0 is
     interpreted by the caller as "no special syscall target" — confirm
     against arm-get-next-pcs.c.  */
  return 0;
}
6151
6152 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6153
6154 int
6155 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6156 {
6157 return arm_is_thumb (self->regcache);
6158 }
6159
6160 /* single_step() is called just before we want to resume the inferior,
6161 if we want to single-step it but there is no hardware or kernel
6162 single-step support. We find the target of the coming instructions
6163 and breakpoint them. */
6164
6165 std::vector<CORE_ADDR>
6166 arm_software_single_step (struct regcache *regcache)
6167 {
6168 struct gdbarch *gdbarch = regcache->arch ();
6169 struct arm_get_next_pcs next_pcs_ctx;
6170
6171 arm_get_next_pcs_ctor (&next_pcs_ctx,
6172 &arm_get_next_pcs_ops,
6173 gdbarch_byte_order (gdbarch),
6174 gdbarch_byte_order_for_code (gdbarch),
6175 0,
6176 regcache);
6177
6178 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6179
6180 for (CORE_ADDR &pc_ref : next_pcs)
6181 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6182
6183 return next_pcs;
6184 }
6185
6186 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6187 for Linux, where some SVC instructions must be treated specially. */
6188
6189 static void
6190 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6191 arm_displaced_step_closure *dsc)
6192 {
6193 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6194
6195 if (debug_displaced)
6196 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6197 "%.8lx\n", (unsigned long) resume_addr);
6198
6199 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6200 }
6201
6202
/* Common copy routine for svc instruction.  */
6204
6205 static int
6206 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6207 arm_displaced_step_closure *dsc)
6208 {
6209 /* Preparation: none.
6210 Insn: unmodified svc.
6211 Cleanup: pc <- insn_addr + insn_size. */
6212
6213 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6214 instruction. */
6215 dsc->wrote_to_pc = 1;
6216
6217 /* Allow OS-specific code to override SVC handling. */
6218 if (dsc->u.svc.copy_svc_os)
6219 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6220 else
6221 {
6222 dsc->cleanup = &cleanup_svc;
6223 return 0;
6224 }
6225 }
6226
6227 static int
6228 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6229 struct regcache *regs, arm_displaced_step_closure *dsc)
6230 {
6231
6232 if (debug_displaced)
6233 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6234 (unsigned long) insn);
6235
6236 dsc->modinsn[0] = insn;
6237
6238 return install_svc (gdbarch, regs, dsc);
6239 }
6240
6241 static int
6242 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6243 struct regcache *regs, arm_displaced_step_closure *dsc)
6244 {
6245
6246 if (debug_displaced)
6247 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6248 insn);
6249
6250 dsc->modinsn[0] = insn;
6251
6252 return install_svc (gdbarch, regs, dsc);
6253 }
6254
6255 /* Copy undefined instructions. */
6256
6257 static int
6258 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6259 arm_displaced_step_closure *dsc)
6260 {
6261 if (debug_displaced)
6262 fprintf_unfiltered (gdb_stdlog,
6263 "displaced: copying undefined insn %.8lx\n",
6264 (unsigned long) insn);
6265
6266 dsc->modinsn[0] = insn;
6267
6268 return 0;
6269 }
6270
6271 static int
6272 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6273 arm_displaced_step_closure *dsc)
6274 {
6275
6276 if (debug_displaced)
6277 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6278 "%.4x %.4x\n", (unsigned short) insn1,
6279 (unsigned short) insn2);
6280
6281 dsc->modinsn[0] = insn1;
6282 dsc->modinsn[1] = insn2;
6283 dsc->numinsns = 2;
6284
6285 return 0;
6286 }
6287
6288 /* Copy unpredictable instructions. */
6289
6290 static int
6291 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6292 arm_displaced_step_closure *dsc)
6293 {
6294 if (debug_displaced)
6295 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6296 "%.8lx\n", (unsigned long) insn);
6297
6298 dsc->modinsn[0] = insn;
6299
6300 return 0;
6301 }
6302
6303 /* The decode_* functions are instruction decoding helpers. They mostly follow
6304 the presentation in the ARM ARM. */
6305
/* Decode the bit-27-clear portion of the unconditional instruction space:
   miscellaneous instructions, memory hints, and Advanced SIMD element or
   structure load/stores.  Dispatch on op1 (bits 20-26), op2 (bits 4-7)
   and rn (bits 16-19), following the ARM ARM encoding tables.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      /* PLD/PLDW with rn == pc is unpredictable.  */
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    /* Memory barriers and CLREX, distinguished by op2.  */
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    /* Register-offset hint forms; bit 7 of op1 is don't-care here.  */
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
6365
/* Decode the unconditional (condition field 0b1111) instruction space.
   Bit 27 clear delegates to the misc/memhint/neon decoder; otherwise the
   switch key packs bits 26-24 into positions 3-1 and bit 20 into
   position 0.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      /* Coprocessor stores and MCRR; sub-decode on bits 23-21.  */
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	/* Coprocessor loads and MRRC; validity depends on whether rn is
	   the PC (literal forms) or not (immediate forms).  */
	int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
6448
6449 /* Decode miscellaneous instructions in dp/misc encoding space. */
6450
static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  /* Dispatch on op2 (bits 4-6) and op (bits 21-22), per the ARM ARM
     "miscellaneous instructions" table.  Only BX/BLX need PC fixups;
     everything else is either copied unmodified or undefined.  */
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1) /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);	/* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* op == 0x0 or 0x2 deliberately reaches the default case.  */
      /* Fall through.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
6501
/* Decode the data-processing/miscellaneous instruction space.  Bit 25
   selects immediate forms; otherwise register forms are distinguished by
   op1 (bits 20-24) and op2 (bits 4-7).  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    arm_displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6547
/* Decode word and unsigned-byte load/store instructions.  A (bit 25)
   selects register offset, B (bit 4) invalidates register forms; op1
   (bits 20-24) selects load vs store, byte vs word, and the
   unprivileged (T-suffixed) variants.  The final three arguments of
   arm_copy_ldr_str_ldrb_strb are load flag, transfer size in bytes, and
   the user-mode ("unprivileged") flag.  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     arm_displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
6584
/* Decode the media instruction space (parallel add/sub, pack/unpack,
   bit-field and USAD instructions), switching on op1 (bits 20-24).
   NOTE(review): opcodes not listed here (e.g. 0x10-0x17, 0x19) fall
   through the switch to the trailing "return 1" — confirm that is the
   intended handling for the signed-multiply group.  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  arm_displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  /* Rd == 0xf distinguishes USAD8 from USADA8.  */
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  /* Rn == 0xf distinguishes BFC from BFI.  */
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6640
6641 static int
6642 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6643 struct regcache *regs,
6644 arm_displaced_step_closure *dsc)
6645 {
6646 if (bit (insn, 25))
6647 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6648 else
6649 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6650 }
6651
/* Decode VFP/Neon extension register load/store instructions, switching
   on the opcode in bits 20-24.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6682
6683 /* Decode shifted register instructions. */
6684
6685 static int
6686 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6687 uint16_t insn2, struct regcache *regs,
6688 arm_displaced_step_closure *dsc)
6689 {
6690 /* PC is only allowed to be used in instruction MOV. */
6691
6692 unsigned int op = bits (insn1, 5, 8);
6693 unsigned int rn = bits (insn1, 0, 3);
6694
6695 if (op == 0x2 && rn == 0xf) /* MOV */
6696 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6697 else
6698 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6699 "dp (shift reg)", dsc);
6700 }
6701
6702
6703 /* Decode extension register load/store. Exactly the same as
6704 arm_decode_ext_reg_ld_st. */
6705
static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     arm_displaced_step_closure *dsc)
{
  /* Opcode lives in bits 4-8 of the first halfword; the comments give
     the corresponding bit patterns.  Only VLDR needs PC substitution
     (it may be PC-relative); all other forms run unmodified.  */
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c:  /* 01x00 */
    case 0x0a: case 0x0e:  /* 01x10 */
    case 0x12: case 0x16:  /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d:  /* 01x01 */
    case 0x0b: case 0x0f:  /* 01x11 */
    case 0x13: case 0x17:  /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6741
/* Decode the SVC and coprocessor instruction space.  op1 is bits 20-25,
   op is bit 4, and coproc (bits 8-11) distinguishes the VFP/Neon
   coprocessors (101x) from generic ones.  */

static int
arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
		      struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 25);
  int op = bit (insn, 4);
  unsigned int coproc = bits (insn, 8, 11);

  if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
    return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* stc/stc2.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* ldc/ldc2 imm/lit.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x3e) == 0x00)
    return arm_copy_undef (gdbarch, insn, dsc);
  else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
    return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
  else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
  else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
  else if ((op1 & 0x30) == 0x20 && !op)
    {
      if ((coproc & 0xe) == 0xa)
	return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
    }
  else if ((op1 & 0x30) == 0x20 && op)
    return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
  else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
  else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
  else if ((op1 & 0x30) == 0x30)
    return arm_copy_svc (gdbarch, insn, regs, dsc);
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Possibly unreachable.  */
}
6786
6787 static int
6788 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6789 uint16_t insn2, struct regcache *regs,
6790 arm_displaced_step_closure *dsc)
6791 {
6792 unsigned int coproc = bits (insn2, 8, 11);
6793 unsigned int bit_5_8 = bits (insn1, 5, 8);
6794 unsigned int bit_9 = bit (insn1, 9);
6795 unsigned int bit_4 = bit (insn1, 4);
6796
6797 if (bit_9 == 0)
6798 {
6799 if (bit_5_8 == 2)
6800 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6801 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6802 dsc);
6803 else if (bit_5_8 == 0) /* UNDEFINED. */
6804 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6805 else
6806 {
6807 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
6808 if ((coproc & 0xe) == 0xa)
6809 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6810 dsc);
6811 else /* coproc is not 101x. */
6812 {
6813 if (bit_4 == 0) /* STC/STC2. */
6814 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6815 "stc/stc2", dsc);
6816 else /* LDC/LDC2 {literal, immeidate}. */
6817 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6818 regs, dsc);
6819 }
6820 }
6821 }
6822 else
6823 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6824
6825 return 0;
6826 }
6827
6828 static void
6829 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6830 arm_displaced_step_closure *dsc, int rd)
6831 {
6832 /* ADR Rd, #imm
6833
6834 Rewrite as:
6835
6836 Preparation: Rd <- PC
6837 Insn: ADD Rd, #imm
6838 Cleanup: Null.
6839 */
6840
6841 /* Rd <- PC */
6842 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6843 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6844 }
6845
6846 static int
6847 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6848 arm_displaced_step_closure *dsc,
6849 int rd, unsigned int imm)
6850 {
6851
6852 /* Encoding T2: ADDS Rd, #imm */
6853 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6854
6855 install_pc_relative (gdbarch, regs, dsc, rd);
6856
6857 return 0;
6858 }
6859
6860 static int
6861 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6862 struct regcache *regs,
6863 arm_displaced_step_closure *dsc)
6864 {
6865 unsigned int rd = bits (insn, 8, 10);
6866 unsigned int imm8 = bits (insn, 0, 7);
6867
6868 if (debug_displaced)
6869 fprintf_unfiltered (gdb_stdlog,
6870 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6871 rd, imm8, insn);
6872
6873 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6874 }
6875
static int
thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int rd = bits (insn2, 8, 11);
  /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
     extract raw immediate encoding rather than computing immediate.  When
     generating ADD or SUB instruction, we can simply perform OR operation to
     set immediate into ADD.  */
  unsigned int imm_3_8 = insn2 & 0x70ff;
  unsigned int imm_i = insn1 & 0x0400;  /* Clear all bits except bit 10.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
			rd, imm_i, imm_3_8, insn1, insn2);

  /* Note on the comments below: the outer T2/T3 labels refer to the ADR
     encodings (T2 subtracts, T3 adds); the inner comments name the
     ADD/SUB *immediate* encodings we substitute (both are "encoding T3"
     of their respective instructions).  Rd has been preloaded with PC by
     install_pc_relative.  */
  if (bit (insn1, 7))  /* Encoding T2 */
    {
      /* Encoding T3: SUB Rd, Rd, #imm */
      dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  else  /* Encoding T3 */
    {
      /* Encoding T3: ADD Rd, Rd, #imm */
      dsc->modinsn[0] = (0xf100 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  dsc->numinsns = 2;

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
6912
static int
thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
			      struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int rt = bits (insn1, 8, 10);
  unsigned int pc;
  int imm8 = (bits (insn1, 0, 7) << 2);  /* Offset is imm8 scaled by 4.  */

  /* LDR Rd, #imm8

     Rewrite as:

     Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;

     Insn: LDR R0, [R2, R3];
     Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb ldr r%d [pc #%d]\n"
			, rt, imm8);

  /* Save the scratch registers the rewritten sequence clobbers.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
  /* The assembler calculates the required value of the offset from the
     Align(PC,4) value of this instruction to the label.  */
  pc = pc & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);

  /* Record the load parameters so cleanup_load can move R0 into the
     real destination Rt and restore the scratch registers.  */
  dsc->rd = rt;
  dsc->u.ldst.xfersize = 4;
  dsc->u.ldst.rn = 0;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  dsc->modinsn[0] = 0x58d0;  /* ldr r0, [r2, r3] */

  dsc->cleanup = &cleanup_load;

  return 0;
}
6960
/* Copy Thumb cbnz/cbz instruction.  */
6962
static int
thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
		     struct regcache *regs,
		     arm_displaced_step_closure *dsc)
{
  int non_zero = bit (insn1, 11);	/* 1 for CBNZ, 0 for CBZ.  */
  /* Branch offset: i:imm5:'0' reassembled from bits 9 and 3-7.  */
  unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
  CORE_ADDR from = dsc->insn_addr;
  int rn = bits (insn1, 0, 2);
  int rn_val = displaced_read_reg (regs, dsc, rn);

  /* Evaluate the compare-and-branch condition now, against the current
     register value, since the out-of-line copy is just a NOP.  */
  dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
  /* CBNZ and CBZ do not affect the condition flags.  If condition is true,
     set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
     condition is false, let it be, cleanup_branch will do nothing.  */
  if (dsc->u.branch.cond)
    {
      dsc->u.branch.cond = INST_AL;
      dsc->u.branch.dest = from + 4 + imm5;
    }
  else
      dsc->u.branch.dest = from + 2;

  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
			" insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
			rn, rn_val, insn1, dsc->u.branch.dest);

  /* Execute a NOP out of line; cleanup_branch performs the actual
     control transfer.  */
  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;
  return 0;
}
6999
/* Copy Table Branch Byte/Halfword (TBB/TBH).  */
static int
thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  ULONGEST rn_val, rm_val;
  int is_tbh = bit (insn2, 4);	/* 1 for TBH (halfword), 0 for TBB (byte).  */
  CORE_ADDR halfwords = 0;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* Base register Rn is in insn1 bits 0-3, index Rm in insn2 bits 0-3.  */
  rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
  rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));

  /* Read the branch-table entry directly from inferior memory.
     NOTE(review): the return value of target_read_memory is unchecked;
     on a failed read HALFWORDS silently stays 0 and the branch target
     becomes insn_addr + 4 — confirm whether an error/warning is
     warranted here.  */
  if (is_tbh)
    {
      gdb_byte buf[2];

      target_read_memory (rn_val + 2 * rm_val, buf, 2);
      halfwords = extract_unsigned_integer (buf, 2, byte_order);
    }
  else
    {
      gdb_byte buf[1];

      target_read_memory (rn_val + rm_val, buf, 1);
      halfwords = extract_unsigned_integer (buf, 1, byte_order);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
			" offset 0x%x\n", is_tbh ? "tbh" : "tbb",
			(unsigned int) rn_val, (unsigned int) rm_val,
			(unsigned int) halfwords);

  /* The table entry is a halfword count from the aligned PC (insn_addr
     + 4); cleanup_branch performs the actual transfer.  */
  dsc->u.branch.cond = INST_AL;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
7044
7045 static void
7046 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7047 arm_displaced_step_closure *dsc)
7048 {
7049 /* PC <- r7 */
7050 int val = displaced_read_reg (regs, dsc, 7);
7051 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7052
7053 /* r7 <- r8 */
7054 val = displaced_read_reg (regs, dsc, 8);
7055 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7056
7057 /* r8 <- tmp[0] */
7058 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7059
7060 }
7061
/* Copy a 16-bit Thumb POP {..., PC} instruction for displaced stepping;
   the PC cannot be popped directly out of line, so the register list is
   rewritten as described in the comment below.  */

static int
thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
			 struct regcache *regs,
			 arm_displaced_step_closure *dsc)
{
  dsc->u.block.regmask = insn1 & 0x00ff;

  /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
     to :

     (1) register list is full, that is, r0-r7 are used.
     Prepare: tmp[0] <- r8

     POP {r0, r1, ...., r6, r7}; remove PC from reglist
     MOV r8, r7; Move value of r7 to r8;
     POP {r7}; Store PC value into r7.

     Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]

     (2) register list is not full, supposing there are N registers in
     register list (except PC, 0 <= N <= 7).
     Prepare: for each i, 0 - N, tmp[i] <- ri.

     POP {r0, r1, ...., rN};

     Cleanup: Set registers in original reglist from r0 - rN.  Restore r0 - rN
     from tmp[] properly.
  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
			dsc->u.block.regmask, insn1);

  if (dsc->u.block.regmask == 0xff)
    {
      /* Case (1): full list.  Save r8, which the MOV below clobbers.  */
      dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);

      dsc->modinsn[0] = (insn1 & 0xfeff);  /* POP {r0,r1,...,r6, r7} */
      dsc->modinsn[1] = 0x46b8;  /* MOV r8, r7 */
      dsc->modinsn[2] = 0xbc80;  /* POP {r7} */

      dsc->numinsns = 3;
      dsc->cleanup = &cleanup_pop_pc_16bit_all;
    }
  else
    {
      /* Case (2): partial list.  N registers plus the PC will be popped,
	 so save r0..rN (num_in_list + 1 registers) before they are
	 overwritten by the contiguous replacement list.  */
      unsigned int num_in_list = bitcount (dsc->u.block.regmask);
      unsigned int i;
      unsigned int new_regmask;

      for (i = 0; i < num_in_list + 1; i++)
	dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

      new_regmask = (1 << (num_in_list + 1)) - 1;

      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
					  "{..., pc}: original reg list %.4x,"
					  " modified list %.4x\n"),
			    (int) dsc->u.block.regmask, new_regmask);

      /* Record PC in the regmask so cleanup_block_load_pc writes it via
	 the proper PC-write path.  */
      dsc->u.block.regmask |= 0x8000;
      dsc->u.block.writeback = 0;
      dsc->u.block.cond = INST_AL;

      dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);

      dsc->cleanup = &cleanup_block_load_pc;
    }

  return 0;
}
7134
/* Decode the 16-bit Thumb instruction INSN1 and copy it for displaced
   stepping, dispatching on the major opcode field (bits 12-15) to the
   matching copy routine.  Instructions that cannot reference the PC are
   copied unmodified.  Calls internal_error on a decode failure.  */

static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    struct regcache *regs,
				    arm_displaced_step_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions.  */
  switch (op_bit_12_15)
    {
      /* Shift (immediate), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					 "shift/add/sub/mov/cmp",
					 dsc);
      break;
    case 4:
      switch (op_bit_10_11)
	{
	case 0: /* Data-processing */
	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					     "data-processing",
					     dsc);
	  break;
	case 1: /* Special data instructions and branch and exchange.  */
	  {
	    unsigned short op = bits (insn1, 7, 9);
	    if (op == 6 || op == 7) /* BX or BLX */
	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
						 dsc);
	  }
	  break;
	default: /* LDR (literal) */
	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
	}
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address */
	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
	switch (bits (insn1, 8, 11))
	  {
	  case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
	    break;
	  case 12: case 13: /* POP */
	    if (bit (insn1, 8)) /* PC is in register list.  */
	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
	    break;
	  case 15: /* If-Then, and hints */
	    if (bits (insn1, 0, 3))
	      /* If-Then makes up to four following instructions conditional.
		 IT instruction itself is not conditional, so handle it as a
		 common unmodified instruction.  */
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
						 dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
	    break;
	  default:
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
	  }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
	err = thumb_copy_b (gdbarch, insn1, dsc);
      else
	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
7237
/* Decode a 32-bit Thumb-2 instruction from the "load byte/halfword/word
   and memory hints" group and copy it for displaced stepping.  Bits 5-6
   of the first halfword select the access size; PC-relative (literal)
   forms and preloads are rewritten, everything else is copied
   unmodified.  Returns the result of the chosen copy routine.  */

static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
				 uint16_t insn1, uint16_t insn2,
				 struct regcache *regs,
				 arm_displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);	/* Destination register.  */
  int rn = bits (insn1, 0, 3);		/* Base register; 0xf is the PC.  */
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
	{
	  if (rn == 0xf)
	    /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"pli/pld", dsc);
	}
      else
	{
	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     1);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrb{reg, immediate}/ldrbt",
						dsc);
	}

      break;
    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "pld/unalloc memhint", dsc);
      else
	{
	  if (rn == 0xf)
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     2);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrh/ldrht", dsc);
	}
      break;
    case 2: /* Load word */
      {
	int insn2_bit_8_11 = bits (insn2, 8, 11);

	if (rn == 0xf)
	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
	else if (op1 == 0x1) /* Encoding T3 */
	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
					   0, 1);
	else /* op1 == 0x0 */
	  {
	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
	      /* LDR (immediate) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, bit (insn2, 8), 1);
	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						  "ldrt", dsc);
	    else
	      /* LDR (register) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, 0, 0);
	  }
	break;
      }
    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      break;
    }
  return 0;
}
7317
/* Decode the 32-bit Thumb-2 instruction (halfwords INSN1, INSN2) and
   copy it for displaced stepping, dispatching on op1 (bits 11-12 of the
   first halfword).  Calls internal_error on a decode failure.  */

static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    uint16_t insn2, struct regcache *regs,
				    arm_displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      {
	switch (bits (insn1, 9, 10))
	  {
	  case 0:
	    if (bit (insn1, 6))
	      {
		/* Load/store {dual, exclusive}, table branch.  */
		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
		    && bits (insn2, 5, 7) == 0)
		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
						  dsc);
		else
		  /* PC is not allowed to use in load/store {dual, exclusive}
		     instructions.  */
		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						     "load/store dual/ex", dsc);
	      }
	    else /* load/store multiple */
	      {
		switch (bits (insn1, 7, 8))
		  {
		  case 0: case 3: /* SRS, RFE */
		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						       "srs/rfe", dsc);
		    break;
		  case 1: case 2: /* LDM/STM/PUSH/POP */
		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
		    break;
		  }
	      }
	    break;

	  case 1:
	    /* Data-processing (shift register).  */
	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
					      dsc);
	    break;
	  default: /* Coprocessor instructions.  */
	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	    break;
	  }
	break;
      }
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
	{
	  if (bit (insn2, 14)  /* BLX/BL */
	      || bit (insn2, 12) /* Unconditional branch */
	      || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
	  else
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "misc ctrl", dsc);
	}
      else
	{
	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
	    {
	      int dp_op = bits (insn1, 4, 8);
	      int rn = bits (insn1, 0, 3);
	      /* ADR/ADD (PC-relative) must be rewritten; the rest cannot
		 name the PC.  */
	      if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
						    regs, dsc);
	      else
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "dp/pb", dsc);
	    }
	  else /* Data processing (modified immediate) */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "dp/mi", dsc);
	}
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
	{
	case 0:
	  if (bit (insn1, 4))
	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
						   regs, dsc);
	  else /* NEON Load/Store and Store single data item */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "neon elt/struct load/store",
					       dsc);
	  break;
	case 1: /* op1 = 3, bits (9, 10) == 1 */
	  switch (bits (insn1, 7, 8))
	    {
	    case 0: case 1: /* Data processing (register) */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "dp(reg)", dsc);
	      break;
	    case 2: /* Multiply and absolute difference */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "mul/mua/diff", dsc);
	      break;
	    case 3: /* Long multiply and divide */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "lmul/lmua", dsc);
	      break;
	    }
	  break;
	default: /* Coprocessor instructions */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	  break;
	}
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));

}
7445
7446 static void
7447 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7448 struct regcache *regs,
7449 arm_displaced_step_closure *dsc)
7450 {
7451 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7452 uint16_t insn1
7453 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7454
7455 if (debug_displaced)
7456 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7457 "at %.8lx\n", insn1, (unsigned long) from);
7458
7459 dsc->is_thumb = 1;
7460 dsc->insn_size = thumb_insn_size (insn1);
7461 if (thumb_insn_size (insn1) == 4)
7462 {
7463 uint16_t insn2
7464 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7465 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7466 }
7467 else
7468 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7469 }
7470
/* Prepare a displaced copy of the instruction at FROM, to be executed
   from scratch space at TO.  Fills DSC with the modified
   instruction(s) and any cleanup to run after the step; dispatches to
   the Thumb path when the inferior is not in ARM mode.  Calls
   internal_error on a decode failure.  */

void
arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, struct regcache *regs,
			    arm_displaced_step_closure *dsc)
{
  int err = 0;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  uint32_t insn;

  /* Most displaced instructions use a 1-instruction scratch space, so set this
     here and override below if/when necessary.  */
  dsc->numinsns = 1;
  dsc->insn_addr = from;
  dsc->scratch_base = to;
  dsc->cleanup = NULL;
  dsc->wrote_to_pc = 0;

  if (!displaced_in_arm_mode (regs))
    return thumb_process_displaced_insn (gdbarch, from, regs, dsc);

  dsc->is_thumb = 0;
  dsc->insn_size = 4;
  insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
			"at %.8lx\n", (unsigned long) insn,
			(unsigned long) from);

  /* Dispatch on bit 4 and bits 25-27 of the ARM encoding; the
     always-condition-field-0xf group is handled separately.  */
  if ((insn & 0xf0000000) == 0xf0000000)
    err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
  else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
    {
    case 0x0: case 0x1: case 0x2: case 0x3:
      err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
      break;

    case 0x4: case 0x5: case 0x6:
      err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
      break;

    case 0x7:
      err = arm_decode_media (gdbarch, insn, dsc);
      break;

    case 0x8: case 0x9: case 0xa: case 0xb:
      err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
      break;

    case 0xc: case 0xd: case 0xe: case 0xf:
      err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
      break;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("arm_process_displaced_insn: Instruction decode error"));
}
7528
7529 /* Actually set up the scratch space for a displaced instruction. */
7530
7531 void
7532 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7533 CORE_ADDR to, arm_displaced_step_closure *dsc)
7534 {
7535 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7536 unsigned int i, len, offset;
7537 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7538 int size = dsc->is_thumb? 2 : 4;
7539 const gdb_byte *bkp_insn;
7540
7541 offset = 0;
7542 /* Poke modified instruction(s). */
7543 for (i = 0; i < dsc->numinsns; i++)
7544 {
7545 if (debug_displaced)
7546 {
7547 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7548 if (size == 4)
7549 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7550 dsc->modinsn[i]);
7551 else if (size == 2)
7552 fprintf_unfiltered (gdb_stdlog, "%.4x",
7553 (unsigned short)dsc->modinsn[i]);
7554
7555 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7556 (unsigned long) to + offset);
7557
7558 }
7559 write_memory_unsigned_integer (to + offset, size,
7560 byte_order_for_code,
7561 dsc->modinsn[i]);
7562 offset += size;
7563 }
7564
7565 /* Choose the correct breakpoint instruction. */
7566 if (dsc->is_thumb)
7567 {
7568 bkp_insn = tdep->thumb_breakpoint;
7569 len = tdep->thumb_breakpoint_size;
7570 }
7571 else
7572 {
7573 bkp_insn = tdep->arm_breakpoint;
7574 len = tdep->arm_breakpoint_size;
7575 }
7576
7577 /* Put breakpoint afterwards. */
7578 write_memory (to + offset, bkp_insn, len);
7579
7580 if (debug_displaced)
7581 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7582 paddress (gdbarch, from), paddress (gdbarch, to));
7583 }
7584
7585 /* Entry point for cleaning things up after a displaced instruction has been
7586 single-stepped. */
7587
7588 void
7589 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7590 struct displaced_step_closure *dsc_,
7591 CORE_ADDR from, CORE_ADDR to,
7592 struct regcache *regs)
7593 {
7594 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7595
7596 if (dsc->cleanup)
7597 dsc->cleanup (gdbarch, regs, dsc);
7598
7599 if (!dsc->wrote_to_pc)
7600 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7601 dsc->insn_addr + dsc->insn_size);
7602
7603 }
7604
7605 #include "bfd-in2.h"
7606 #include "libcoff.h"
7607
/* Print one instruction at MEMADDR for GDB's disassembler.  If MEMADDR
   is a Thumb address, plant a fake COFF Thumb symbol in INFO->symbols
   so that the opcodes ARM disassembler switches to decoding Thumb
   instructions.  Returns what default_print_insn returns.  */

static int
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
{
  gdb_disassembler *di
    = static_cast<gdb_disassembler *>(info->application_data);
  struct gdbarch *gdbarch = di->arch ();

  if (arm_pc_is_thumb (gdbarch, memaddr))
    {
      /* Static so the fake symbol is built only once (csym.native acts
	 as the initialized-flag) and stays valid across calls.  */
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;

      if (csym.native == NULL)
	{
	  /* Create a fake symbol vector containing a Thumb symbol.
	     This is solely so that the code in print_insn_little_arm()
	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
	     the presence of a Thumb symbol and switch to decoding
	     Thumb instructions.  */

	  fake_target.flavour = bfd_target_coff_flavour;
	  fake_bfd.xvec = &fake_target;
	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
	  csym.native = &ce;
	  csym.symbol.the_bfd = &fake_bfd;
	  csym.symbol.name = "fake";
	  asym = (asymbol *) & csym;
	}

      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    }
  else
    info->symbols = NULL;

  /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
     accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit.  Otherwise,
     opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
     the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
     in default_print_insn.  */
  if (exec_bfd != NULL)
    info->flags |= USER_SPECIFIED_MACHINE_TYPE;

  return default_print_insn (memaddr, info);
}
7656
7657 /* The following define instruction sequences that will cause ARM
7658 cpu's to take an undefined instruction trap. These are used to
7659 signal a breakpoint to GDB.
7660
7661 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7662 modes. A different instruction is required for each mode. The ARM
7663 cpu's can also be big or little endian. Thus four different
7664 instructions are needed to support all cases.
7665
7666 Note: ARMv4 defines several new instructions that will take the
7667 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7668 not in fact add the new instructions. The new undefined
7669 instructions in ARMv4 are all instructions that had no defined
7670 behaviour in earlier chips. There is no guarantee that they will
7671 raise an exception, but may be treated as NOP's. In practice, it
   may only be safe to rely on instructions matching:
7673
7674 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7675 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7676 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7677
   Even this may only be true if the condition predicate is true.  The
7679 following use a condition predicate of ALWAYS so it is always TRUE.
7680
7681 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7682 and NetBSD all use a software interrupt rather than an undefined
   instruction to force a trap.  This can be handled by the
   abi-specific code during establishment of the gdbarch vector.  */
7685
7686 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7687 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7688 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7689 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7690
7691 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7692 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7693 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7694 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7695
7696 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7697
7698 static int
7699 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7700 {
7701 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7702 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7703
7704 if (arm_pc_is_thumb (gdbarch, *pcptr))
7705 {
7706 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7707
7708 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7709 check whether we are replacing a 32-bit instruction. */
7710 if (tdep->thumb2_breakpoint != NULL)
7711 {
7712 gdb_byte buf[2];
7713
7714 if (target_read_memory (*pcptr, buf, 2) == 0)
7715 {
7716 unsigned short inst1;
7717
7718 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7719 if (thumb_insn_size (inst1) == 4)
7720 return ARM_BP_KIND_THUMB2;
7721 }
7722 }
7723
7724 return ARM_BP_KIND_THUMB;
7725 }
7726 else
7727 return ARM_BP_KIND_ARM;
7728
7729 }
7730
7731 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7732
7733 static const gdb_byte *
7734 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7735 {
7736 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7737
7738 switch (kind)
7739 {
7740 case ARM_BP_KIND_ARM:
7741 *size = tdep->arm_breakpoint_size;
7742 return tdep->arm_breakpoint;
7743 case ARM_BP_KIND_THUMB:
7744 *size = tdep->thumb_breakpoint_size;
7745 return tdep->thumb_breakpoint;
7746 case ARM_BP_KIND_THUMB2:
7747 *size = tdep->thumb2_breakpoint_size;
7748 return tdep->thumb2_breakpoint;
7749 default:
7750 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7751 }
7752 }
7753
/* Implement the breakpoint_kind_from_current_state gdbarch method.  */

static int
arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
					struct regcache *regcache,
					CORE_ADDR *pcptr)
{
  gdb_byte buf[4];

  /* Check the memory pointed by PC is readable.  */
  if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
    {
      struct arm_get_next_pcs next_pcs_ctx;

      arm_get_next_pcs_ctor (&next_pcs_ctx,
			     &arm_get_next_pcs_ops,
			     gdbarch_byte_order (gdbarch),
			     gdbarch_byte_order_for_code (gdbarch),
			     0,
			     regcache);

      std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);

      /* If MEMADDR is the next instruction of current pc, do the
	 software single step computation, and get the thumb mode by
	 the destination address.  */
      for (CORE_ADDR pc : next_pcs)
	{
	  if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
	    {
	      if (IS_THUMB_ADDR (pc))
		{
		  *pcptr = MAKE_THUMB_ADDR (*pcptr);
		  return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
		}
	      else
		return ARM_BP_KIND_ARM;
	    }
	}
    }

  /* Otherwise fall back on the static PC-based heuristics.  */
  return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
}
7797
/* Extract from an array REGBUF containing the (raw) register state a
   function return value of type TYPE, and copy that, in virtual
   format, into VALBUF.  */

static void
arm_extract_return_value (struct type *type, struct regcache *regs,
			  gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE_FLT == TYPE_CODE (type))
    {
      /* Where the floating point value lives depends on the ABI's FP
	 model.  */
      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  {
	    /* The value is in register F0 in internal format.  We need to
	       extract the raw value and then convert it to the desired
	       internal type.  */
	    bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];

	    regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
	    target_float_convert (tmpbuf, arm_ext_type (gdbarch),
				  valbuf, type);
	  }
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Soft-float: the value is in the core registers r0 (and r1
	     for doubles).  */
	  regs->cooked_read (ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regs->cooked_read (ARM_A1_REGNUM + 1,
			       valbuf + ARM_INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_extract_return_value: "
			    "Floating point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      /* If the type is a plain integer, then the access is
	 straight-forward.  Otherwise we have to play around a bit
	 more.  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      ULONGEST tmp;

      while (len > 0)
	{
	  /* By using store_unsigned_integer we avoid having to do
	     anything special for small big-endian values.  */
	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
	  store_unsigned_integer (valbuf,
				  (len > ARM_INT_REGISTER_SIZE
				   ? ARM_INT_REGISTER_SIZE : len),
				  byte_order, tmp);
	  len -= ARM_INT_REGISTER_SIZE;
	  valbuf += ARM_INT_REGISTER_SIZE;
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];

      while (len > 0)
	{
	  regs->cooked_read (regno++, tmpbuf);
	  memcpy (valbuf, tmpbuf,
		  len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
	  len -= ARM_INT_REGISTER_SIZE;
	  valbuf += ARM_INT_REGISTER_SIZE;
	}
    }
}
7890
7891
7892 /* Will a function return an aggregate type in memory or in a
7893 register? Return 0 if an aggregate type can be returned in a
7894 register, 1 if it must be returned in memory. */
7895
7896 static int
7897 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7898 {
7899 enum type_code code;
7900
7901 type = check_typedef (type);
7902
7903 /* Simple, non-aggregate types (ie not including vectors and
7904 complex) are always returned in a register (or registers). */
7905 code = TYPE_CODE (type);
7906 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7907 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7908 return 0;
7909
7910 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7911 {
7912 /* Vector values should be returned using ARM registers if they
7913 are not over 16 bytes. */
7914 return (TYPE_LENGTH (type) > 16);
7915 }
7916
7917 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7918 {
7919 /* The AAPCS says all aggregates not larger than a word are returned
7920 in a register. */
7921 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
7922 return 0;
7923
7924 return 1;
7925 }
7926 else
7927 {
7928 int nRc;
7929
7930 /* All aggregate types that won't fit in a register must be returned
7931 in memory. */
7932 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
7933 return 1;
7934
7935 /* In the ARM ABI, "integer" like aggregate types are returned in
7936 registers. For an aggregate type to be integer like, its size
7937 must be less than or equal to ARM_INT_REGISTER_SIZE and the
7938 offset of each addressable subfield must be zero. Note that bit
7939 fields are not addressable, and all addressable subfields of
7940 unions always start at offset zero.
7941
7942 This function is based on the behaviour of GCC 2.95.1.
7943 See: gcc/arm.c: arm_return_in_memory() for details.
7944
7945 Note: All versions of GCC before GCC 2.95.2 do not set up the
7946 parameters correctly for a function returning the following
7947 structure: struct { float f;}; This should be returned in memory,
7948 not a register. Richard Earnshaw sent me a patch, but I do not
7949 know of any way to detect if a function like the above has been
7950 compiled with the correct calling convention. */
7951
7952 /* Assume all other aggregate types can be returned in a register.
7953 Run a check for structures, unions and arrays. */
7954 nRc = 0;
7955
7956 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7957 {
7958 int i;
7959 /* Need to check if this struct/union is "integer" like. For
7960 this to be true, its size must be less than or equal to
7961 ARM_INT_REGISTER_SIZE and the offset of each addressable
7962 subfield must be zero. Note that bit fields are not
7963 addressable, and unions always start at offset zero. If any
7964 of the subfields is a floating point type, the struct/union
7965 cannot be an integer type. */
7966
7967 /* For each field in the object, check:
7968 1) Is it FP? --> yes, nRc = 1;
7969 2) Is it addressable (bitpos != 0) and
7970 not packed (bitsize == 0)?
7971 --> yes, nRc = 1
7972 */
7973
7974 for (i = 0; i < TYPE_NFIELDS (type); i++)
7975 {
7976 enum type_code field_type_code;
7977
7978 field_type_code
7979 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7980 i)));
7981
7982 /* Is it a floating point type field? */
7983 if (field_type_code == TYPE_CODE_FLT)
7984 {
7985 nRc = 1;
7986 break;
7987 }
7988
7989 /* If bitpos != 0, then we have to care about it. */
7990 if (TYPE_FIELD_BITPOS (type, i) != 0)
7991 {
7992 /* Bitfields are not addressable. If the field bitsize is
7993 zero, then the field is not packed. Hence it cannot be
7994 a bitfield or any other packed type. */
7995 if (TYPE_FIELD_BITSIZE (type, i) == 0)
7996 {
7997 nRc = 1;
7998 break;
7999 }
8000 }
8001 }
8002 }
8003
8004 return nRc;
8005 }
8006 }
8007
/* Write into appropriate registers a function return value of type
   TYPE, given in virtual format.  */

static void
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE (type) == TYPE_CODE_FLT)
    {
      gdb_byte buf[ARM_FP_REGISTER_SIZE];

      /* Where the value goes depends on the ABI's FP model; this
	 mirrors arm_extract_return_value.  */
      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:

	  /* Convert to the FPA internal format and store in F0.  */
	  target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
	  regs->cooked_write (ARM_F0_REGNUM, buf);
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Soft-float: store into r0 (and r1 for doubles).  */
	  regs->cooked_write (ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regs->cooked_write (ARM_A1_REGNUM + 1,
				valbuf + ARM_INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
	{
	  /* Values of one word or less are zero/sign-extended and
	     returned in r0.  */
	  bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);

	  store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
	  regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
	}
      else
	{
	  /* Integral values greater than one word are stored in consecutive
	     registers starting with r0.  This will always be a multiple of
	     the register size.  */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;

	  while (len > 0)
	    {
	      regs->cooked_write (regno++, valbuf);
	      len -= ARM_INT_REGISTER_SIZE;
	      valbuf += ARM_INT_REGISTER_SIZE;
	    }
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];

      while (len > 0)
	{
	  memcpy (tmpbuf, valbuf,
		  len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
	  regs->cooked_write (regno++, tmpbuf);
	  len -= ARM_INT_REGISTER_SIZE;
	  valbuf += ARM_INT_REGISTER_SIZE;
	}
    }
}
8100
8101
/* Handle function return values.  Implement the return_value gdbarch
   method: decide whether VALTYPE is returned in registers or in memory
   (struct convention), and read (READBUF) or write (WRITEBUF) the
   value when asked.  VFP candidate values are passed in d/s/q
   registers when the function uses the VFP ABI.  */

static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
		  struct type *valtype, struct regcache *regcache,
		  gdb_byte *readbuf, const gdb_byte *writebuf)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  int vfp_base_count;

  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
    {
      /* VFP co-processor register candidate: transfer each base unit
	 through the appropriate d/s/q register.  */
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      int i;
      for (i = 0; i < vfp_base_count; i++)
	{
	  if (reg_char == 'q')
	    {
	      /* Quad registers are accessed via the NEON helpers.  */
	      if (writebuf)
		arm_neon_quad_write (gdbarch, regcache, i,
				     writebuf + i * unit_length);

	      if (readbuf)
		arm_neon_quad_read (gdbarch, regcache, i,
				    readbuf + i * unit_length);
	    }
	  else
	    {
	      /* Build the register name ("s0", "d1", ...) and map it to
		 a register number.  */
	      char name_buf[4];
	      int regnum;

	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						    strlen (name_buf));
	      if (writebuf)
		regcache->cooked_write (regnum, writebuf + i * unit_length);
	      if (readbuf)
		regcache->cooked_read (regnum, readbuf + i * unit_length);
	    }
	}
      return RETURN_VALUE_REGISTER_CONVENTION;
    }

  if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
      || TYPE_CODE (valtype) == TYPE_CODE_UNION
      || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
    {
      if (tdep->struct_return == pcc_struct_return
	  || arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }
  else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
    {
      if (arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }

  if (writebuf)
    arm_store_return_value (valtype, regcache, writebuf);

  if (readbuf)
    arm_extract_return_value (valtype, regcache, readbuf);

  return RETURN_VALUE_REGISTER_CONVENTION;
}
8171
8172
8173 static int
8174 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8175 {
8176 struct gdbarch *gdbarch = get_frame_arch (frame);
8177 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8178 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8179 CORE_ADDR jb_addr;
8180 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8181
8182 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8183
8184 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8185 ARM_INT_REGISTER_SIZE))
8186 return 0;
8187
8188 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8189 return 1;
8190 }
8191
/* Recognize GCC and GNU ld's trampolines.  If we are in a trampoline,
   return the target PC.  Otherwise return 0.

   FRAME is the frame currently executing at PC; it is used to fetch
   the register holding the target of a register-indirect stub.  */

CORE_ADDR
arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
{
  const char *name;
  int namelen;
  CORE_ADDR start_addr;

  /* Find the starting address and name of the function containing the PC.  */
  if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
    {
      /* Trampoline 'bx reg' doesn't belong to any functions.  Do the
	 check here.  */
      start_addr = arm_skip_bx_reg (frame, pc);
      if (start_addr != 0)
	return start_addr;

      return 0;
    }

  /* If PC is in a Thumb call or return stub, return the address of the
     target PC, which is in a register.  The thunk functions are called
     _call_via_xx, where x is the register name.  The possible names
     are r0-r9, sl, fp, ip, sp, and lr.  ARM RealView has similar
     functions, named __ARM_call_via_r[0-7].  */
  if (startswith (name, "_call_via_")
      || startswith (name, "__ARM_call_via_"))
    {
      /* Use the name suffix to determine which register contains the
	 target PC.  */
      static const char *table[15] =
      {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
       "r8", "r9", "sl", "fp", "ip", "sp", "lr"
      };
      int regno;
      /* Every name in TABLE is exactly two characters, so the register
	 name is always the last two characters of NAME.  */
      int offset = strlen (name) - 2;

      for (regno = 0; regno <= 14; regno++)
	if (strcmp (&name[offset], table[regno]) == 0)
	  return get_frame_register_unsigned (frame, regno);
    }

  /* GNU ld generates __foo_from_arm or __foo_from_thumb for
     non-interworking calls to foo.  We could decode the stubs
     to find the target but it's easier to use the symbol table.  */
  namelen = strlen (name);
  if (name[0] == '_' && name[1] == '_'
      && ((namelen > 2 + strlen ("_from_thumb")
	   && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
	  || (namelen > 2 + strlen ("_from_arm")
	      && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
    {
      char *target_name;
      /* Drop the leading "__"; the suffix is dropped below.  */
      int target_len = namelen - 2;
      struct bound_minimal_symbol minsym;
      struct objfile *objfile;
      struct obj_section *sec;

      /* Distinguish the two suffixes by their last character:
	 "_from_thumb" ends in 'b', "_from_arm" in 'm'.  */
      if (name[namelen - 1] == 'b')
	target_len -= strlen ("_from_thumb");
      else
	target_len -= strlen ("_from_arm");

      target_name = (char *) alloca (target_len + 1);
      memcpy (target_name, name + 2, target_len);
      target_name[target_len] = '\0';

      /* Prefer a symbol from the objfile containing PC, so that the
	 right "foo" is found when several objfiles define one.  */
      sec = find_pc_section (pc);
      objfile = (sec == NULL) ? NULL : sec->objfile;
      minsym = lookup_minimal_symbol (target_name, NULL, objfile);
      if (minsym.minsym != NULL)
	return BMSYMBOL_VALUE_ADDRESS (minsym);
      else
	return 0;
    }

  return 0;			/* not a stub */
}
8272
8273 static void
8274 set_arm_command (const char *args, int from_tty)
8275 {
8276 printf_unfiltered (_("\
8277 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8278 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8279 }
8280
/* Top-level "show arm" command: print the current values of all
   "show arm" subcommands.  */

static void
show_arm_command (const char *args, int from_tty)
{
  cmd_show_list (showarmcmdlist, from_tty, "");
}
8286
8287 static void
8288 arm_update_current_architecture (void)
8289 {
8290 struct gdbarch_info info;
8291
8292 /* If the current architecture is not ARM, we have nothing to do. */
8293 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8294 return;
8295
8296 /* Update the architecture. */
8297 gdbarch_info_init (&info);
8298
8299 if (!gdbarch_update_p (info))
8300 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8301 }
8302
8303 static void
8304 set_fp_model_sfunc (const char *args, int from_tty,
8305 struct cmd_list_element *c)
8306 {
8307 int fp_model;
8308
8309 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8310 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8311 {
8312 arm_fp_model = (enum arm_float_model) fp_model;
8313 break;
8314 }
8315
8316 if (fp_model == ARM_FLOAT_LAST)
8317 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8318 current_fp_model);
8319
8320 arm_update_current_architecture ();
8321 }
8322
8323 static void
8324 show_fp_model (struct ui_file *file, int from_tty,
8325 struct cmd_list_element *c, const char *value)
8326 {
8327 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8328
8329 if (arm_fp_model == ARM_FLOAT_AUTO
8330 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8331 fprintf_filtered (file, _("\
8332 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8333 fp_model_strings[tdep->fp_model]);
8334 else
8335 fprintf_filtered (file, _("\
8336 The current ARM floating point model is \"%s\".\n"),
8337 fp_model_strings[arm_fp_model]);
8338 }
8339
8340 static void
8341 arm_set_abi (const char *args, int from_tty,
8342 struct cmd_list_element *c)
8343 {
8344 int arm_abi;
8345
8346 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8347 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8348 {
8349 arm_abi_global = (enum arm_abi_kind) arm_abi;
8350 break;
8351 }
8352
8353 if (arm_abi == ARM_ABI_LAST)
8354 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8355 arm_abi_string);
8356
8357 arm_update_current_architecture ();
8358 }
8359
8360 static void
8361 arm_show_abi (struct ui_file *file, int from_tty,
8362 struct cmd_list_element *c, const char *value)
8363 {
8364 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8365
8366 if (arm_abi_global == ARM_ABI_AUTO
8367 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8368 fprintf_filtered (file, _("\
8369 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8370 arm_abi_strings[tdep->arm_abi]);
8371 else
8372 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8373 arm_abi_string);
8374 }
8375
/* Handler for "show arm fallback-mode": print the execution mode
   assumed when symbols give no answer.  */

static void
arm_show_fallback_mode (struct ui_file *file, int from_tty,
			struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(when symbols are unavailable) is \"%s\".\n"),
		    arm_fallback_mode_string);
}
8385
/* Handler for "show arm force-mode": print the execution mode assumed
   regardless of what the symbols say.  */

static void
arm_show_force_mode (struct ui_file *file, int from_tty,
		     struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(even when symbols are available) is \"%s\".\n"),
		    arm_force_mode_string);
}
8395
8396 /* If the user changes the register disassembly style used for info
8397 register and other commands, we have to also switch the style used
8398 in opcodes for disassembly output. This function is run in the "set
8399 arm disassembly" command, and does that. */
8400
8401 static void
8402 set_disassembly_style_sfunc (const char *args, int from_tty,
8403 struct cmd_list_element *c)
8404 {
8405 /* Convert the short style name into the long style name (eg, reg-names-*)
8406 before calling the generic set_disassembler_options() function. */
8407 std::string long_name = std::string ("reg-names-") + disassembly_style;
8408 set_disassembler_options (&long_name[0]);
8409 }
8410
/* Handler for "show arm disassembler": print the register naming
   style currently selected in the disassembler options.  */

static void
show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
			      struct cmd_list_element *c, const char *value)
{
  struct gdbarch *gdbarch = get_current_arch ();
  char *options = get_disassembler_options (gdbarch);
  const char *style = "";
  int len = 0;
  const char *opt;

  /* Scan the option string for "reg-names-*" entries; the last one
     wins.  The style name runs from just past the prefix up to the
     next comma (or end of string).  */
  FOR_EACH_DISASSEMBLER_OPTION (opt, options)
    if (CONST_STRNEQ (opt, "reg-names-"))
      {
	style = &opt[strlen ("reg-names-")];
	len = strcspn (style, ",");
      }

  fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
}
8430 \f
8431 /* Return the ARM register name corresponding to register I. */
8432 static const char *
8433 arm_register_name (struct gdbarch *gdbarch, int i)
8434 {
8435 const int num_regs = gdbarch_num_regs (gdbarch);
8436
8437 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8438 && i >= num_regs && i < num_regs + 32)
8439 {
8440 static const char *const vfp_pseudo_names[] = {
8441 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8442 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8443 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8444 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8445 };
8446
8447 return vfp_pseudo_names[i - num_regs];
8448 }
8449
8450 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8451 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8452 {
8453 static const char *const neon_pseudo_names[] = {
8454 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8455 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8456 };
8457
8458 return neon_pseudo_names[i - num_regs - 32];
8459 }
8460
8461 if (i >= ARRAY_SIZE (arm_register_names))
8462 /* These registers are only supported on targets which supply
8463 an XML description. */
8464 return "";
8465
8466 return arm_register_names[i];
8467 }
8468
8469 /* Test whether the coff symbol specific value corresponds to a Thumb
8470 function. */
8471
8472 static int
8473 coff_sym_is_thumb (int val)
8474 {
8475 return (val == C_THUMBEXT
8476 || val == C_THUMBSTAT
8477 || val == C_THUMBEXTFUNC
8478 || val == C_THUMBSTATFUNC
8479 || val == C_THUMBLABEL);
8480 }
8481
/* arm_coff_make_msymbol_special()
   arm_elf_make_msymbol_special()

   These functions test whether the COFF or ELF symbol corresponds to
   an address in thumb code, and set a "special" bit in a minimal
   symbol to indicate that it does.  */

static void
arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
{
  elf_symbol_type *elfsym = (elf_symbol_type *) sym;

  /* The branch type is encoded in the ELF symbol's st_target_internal
     field; ST_BRANCH_TO_THUMB marks a Thumb-mode entry point.  */
  if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
      == ST_BRANCH_TO_THUMB)
    MSYMBOL_SET_SPECIAL (msym);
}
8498
8499 static void
8500 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8501 {
8502 if (coff_sym_is_thumb (val))
8503 MSYMBOL_SET_SPECIAL (msym);
8504 }
8505
/* Record an ARM ELF mapping symbol ($a, $t or $d) from OBJFILE so the
   code/data state of addresses in SYM's section can be looked up
   later.  Any other special symbol is ignored.  */

static void
arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
			   asymbol *sym)
{
  const char *name = bfd_asymbol_name (sym);
  struct arm_per_objfile *data;
  struct arm_mapping_symbol new_map_sym;

  /* Callers only hand us symbols whose name starts with '$'.  */
  gdb_assert (name[0] == '$');
  if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
    return;

  /* Lazily create the per-objfile map data, with one entry per BFD
     section.  */
  data = arm_objfile_data_key.get (objfile);
  if (data == NULL)
    data = arm_objfile_data_key.emplace (objfile,
					 objfile->obfd->section_count);
  arm_mapping_symbol_vec &map
    = data->section_maps[bfd_get_section (sym)->index];

  /* Store the section-relative value and the mapping kind ('a', 't'
     or 'd').  */
  new_map_sym.value = sym->value;
  new_map_sym.type = name[1];

  /* Insert at the end, the vector will be sorted on first use.  */
  map.push_back (new_map_sym);
}
8531
8532 static void
8533 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8534 {
8535 struct gdbarch *gdbarch = regcache->arch ();
8536 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8537
8538 /* If necessary, set the T bit. */
8539 if (arm_apcs_32)
8540 {
8541 ULONGEST val, t_bit;
8542 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8543 t_bit = arm_psr_thumb_bit (gdbarch);
8544 if (arm_pc_is_thumb (gdbarch, pc))
8545 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8546 val | t_bit);
8547 else
8548 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8549 val & ~t_bit);
8550 }
8551 }
8552
8553 /* Read the contents of a NEON quad register, by reading from two
8554 double registers. This is used to implement the quad pseudo
8555 registers, and for argument passing in case the quad registers are
8556 missing; vectors are passed in quad registers when using the VFP
8557 ABI, even if a NEON unit is not present. REGNUM is the index of
8558 the quad register, in [0, 15]. */
8559
8560 static enum register_status
8561 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8562 int regnum, gdb_byte *buf)
8563 {
8564 char name_buf[4];
8565 gdb_byte reg_buf[8];
8566 int offset, double_regnum;
8567 enum register_status status;
8568
8569 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8570 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8571 strlen (name_buf));
8572
8573 /* d0 is always the least significant half of q0. */
8574 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8575 offset = 8;
8576 else
8577 offset = 0;
8578
8579 status = regcache->raw_read (double_regnum, reg_buf);
8580 if (status != REG_VALID)
8581 return status;
8582 memcpy (buf + offset, reg_buf, 8);
8583
8584 offset = 8 - offset;
8585 status = regcache->raw_read (double_regnum + 1, reg_buf);
8586 if (status != REG_VALID)
8587 return status;
8588 memcpy (buf + offset, reg_buf, 8);
8589
8590 return REG_VALID;
8591 }
8592
/* Read pseudo register REGNUM (numbered at or above the raw register
   count) into BUF.  Pseudos 0-31 are the VFP single-precision views
   of the raw double registers; pseudos 32-47 (when NEON pseudos are
   available) are the quad views.  */

static enum register_status
arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
		 int regnum, gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  /* Rebase REGNUM so that 0 is the first pseudo register.  */
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
  else
    {
      enum register_status status;

      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      /* sN lives inside double register d(N/2); look it up by name.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      status = regcache->raw_read (double_regnum, reg_buf);
      if (status == REG_VALID)
	memcpy (buf, reg_buf + offset, 4);
      return status;
    }
}
8631
8632 /* Store the contents of BUF to a NEON quad register, by writing to
8633 two double registers. This is used to implement the quad pseudo
8634 registers, and for argument passing in case the quad registers are
8635 missing; vectors are passed in quad registers when using the VFP
8636 ABI, even if a NEON unit is not present. REGNUM is the index
8637 of the quad register, in [0, 15]. */
8638
8639 static void
8640 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8641 int regnum, const gdb_byte *buf)
8642 {
8643 char name_buf[4];
8644 int offset, double_regnum;
8645
8646 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8647 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8648 strlen (name_buf));
8649
8650 /* d0 is always the least significant half of q0. */
8651 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8652 offset = 8;
8653 else
8654 offset = 0;
8655
8656 regcache->raw_write (double_regnum, buf + offset);
8657 offset = 8 - offset;
8658 regcache->raw_write (double_regnum + 1, buf + offset);
8659 }
8660
/* Write BUF to pseudo register REGNUM (numbered at or above the raw
   register count).  Pseudos 0-31 are the VFP single-precision views
   of the raw double registers; pseudos 32-47 (when NEON pseudos are
   available) are the quad views.  Mirror of arm_pseudo_read.  */

static void
arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
		  int regnum, const gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  /* Rebase REGNUM so that 0 is the first pseudo register.  */
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
  else
    {
      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      /* sN lives inside double register d(N/2); look it up by name.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      /* Read-modify-write: update only the four bytes belonging to
	 this single-precision register.  */
      regcache->raw_read (double_regnum, reg_buf);
      memcpy (reg_buf + offset, buf, 4);
      regcache->raw_write (double_regnum, reg_buf);
    }
}
8696
8697 static struct value *
8698 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8699 {
8700 const int *reg_p = (const int *) baton;
8701 return value_of_register (*reg_p, frame);
8702 }
8703 \f
8704 static enum gdb_osabi
8705 arm_elf_osabi_sniffer (bfd *abfd)
8706 {
8707 unsigned int elfosabi;
8708 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8709
8710 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8711
8712 if (elfosabi == ELFOSABI_ARM)
8713 /* GNU tools use this value. Check note sections in this case,
8714 as well. */
8715 bfd_map_over_sections (abfd,
8716 generic_elf_osabi_sniff_abi_tag_sections,
8717 &osabi);
8718
8719 /* Anything else will be handled by the generic ELF sniffer. */
8720 return osabi;
8721 }
8722
8723 static int
8724 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8725 struct reggroup *group)
8726 {
8727 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8728 this, FPS register belongs to save_regroup, restore_reggroup, and
8729 all_reggroup, of course. */
8730 if (regnum == ARM_FPS_REGNUM)
8731 return (group == float_reggroup
8732 || group == save_reggroup
8733 || group == restore_reggroup
8734 || group == all_reggroup);
8735 else
8736 return default_register_reggroup_p (gdbarch, regnum, group);
8737 }
8738
8739 /* For backward-compatibility we allow two 'g' packet lengths with
8740 the remote protocol depending on whether FPA registers are
8741 supplied. M-profile targets do not have FPA registers, but some
8742 stubs already exist in the wild which use a 'g' packet which
8743 supplies them albeit with dummy values. The packet format which
8744 includes FPA registers should be considered deprecated for
8745 M-profile targets. */
8746
8747 static void
8748 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8749 {
8750 if (gdbarch_tdep (gdbarch)->is_m)
8751 {
8752 const target_desc *tdesc;
8753
8754 /* If we know from the executable this is an M-profile target,
8755 cater for remote targets whose register set layout is the
8756 same as the FPA layout. */
8757 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8758 register_remote_g_packet_guess (gdbarch,
8759 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8760 tdesc);
8761
8762 /* The regular M-profile layout. */
8763 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
8764 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
8765 tdesc);
8766
8767 /* M-profile plus M4F VFP. */
8768 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
8769 register_remote_g_packet_guess (gdbarch,
8770 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
8771 tdesc);
8772 }
8773
8774 /* Otherwise we don't have a useful guess. */
8775 }
8776
8777 /* Implement the code_of_frame_writable gdbarch method. */
8778
8779 static int
8780 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8781 {
8782 if (gdbarch_tdep (gdbarch)->is_m
8783 && get_frame_type (frame) == SIGTRAMP_FRAME)
8784 {
8785 /* M-profile exception frames return to some magic PCs, where
8786 isn't writable at all. */
8787 return 0;
8788 }
8789 else
8790 return 1;
8791 }
8792
8793 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8794 to be postfixed by a version (eg armv7hl). */
8795
8796 static const char *
8797 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8798 {
8799 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8800 return "arm(v[^- ]*)?";
8801 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8802 }
8803
8804 /* Initialize the current architecture based on INFO. If possible,
8805 re-use an architecture from ARCHES, which is a list of
8806 architectures already created during this debugging session.
8807
8808 Called e.g. at program startup, when reading a core file, and when
8809 reading a binary file. */
8810
8811 static struct gdbarch *
8812 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8813 {
8814 struct gdbarch_tdep *tdep;
8815 struct gdbarch *gdbarch;
8816 struct gdbarch_list *best_arch;
8817 enum arm_abi_kind arm_abi = arm_abi_global;
8818 enum arm_float_model fp_model = arm_fp_model;
8819 struct tdesc_arch_data *tdesc_data = NULL;
8820 int i, is_m = 0;
8821 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8822 int have_wmmx_registers = 0;
8823 int have_neon = 0;
8824 int have_fpa_registers = 1;
8825 const struct target_desc *tdesc = info.target_desc;
8826
8827 /* If we have an object to base this architecture on, try to determine
8828 its ABI. */
8829
8830 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8831 {
8832 int ei_osabi, e_flags;
8833
8834 switch (bfd_get_flavour (info.abfd))
8835 {
8836 case bfd_target_coff_flavour:
8837 /* Assume it's an old APCS-style ABI. */
8838 /* XXX WinCE? */
8839 arm_abi = ARM_ABI_APCS;
8840 break;
8841
8842 case bfd_target_elf_flavour:
8843 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8844 e_flags = elf_elfheader (info.abfd)->e_flags;
8845
8846 if (ei_osabi == ELFOSABI_ARM)
8847 {
8848 /* GNU tools used to use this value, but do not for EABI
8849 objects. There's nowhere to tag an EABI version
8850 anyway, so assume APCS. */
8851 arm_abi = ARM_ABI_APCS;
8852 }
8853 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8854 {
8855 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8856
8857 switch (eabi_ver)
8858 {
8859 case EF_ARM_EABI_UNKNOWN:
8860 /* Assume GNU tools. */
8861 arm_abi = ARM_ABI_APCS;
8862 break;
8863
8864 case EF_ARM_EABI_VER4:
8865 case EF_ARM_EABI_VER5:
8866 arm_abi = ARM_ABI_AAPCS;
8867 /* EABI binaries default to VFP float ordering.
8868 They may also contain build attributes that can
8869 be used to identify if the VFP argument-passing
8870 ABI is in use. */
8871 if (fp_model == ARM_FLOAT_AUTO)
8872 {
8873 #ifdef HAVE_ELF
8874 switch (bfd_elf_get_obj_attr_int (info.abfd,
8875 OBJ_ATTR_PROC,
8876 Tag_ABI_VFP_args))
8877 {
8878 case AEABI_VFP_args_base:
8879 /* "The user intended FP parameter/result
8880 passing to conform to AAPCS, base
8881 variant". */
8882 fp_model = ARM_FLOAT_SOFT_VFP;
8883 break;
8884 case AEABI_VFP_args_vfp:
8885 /* "The user intended FP parameter/result
8886 passing to conform to AAPCS, VFP
8887 variant". */
8888 fp_model = ARM_FLOAT_VFP;
8889 break;
8890 case AEABI_VFP_args_toolchain:
8891 /* "The user intended FP parameter/result
8892 passing to conform to tool chain-specific
8893 conventions" - we don't know any such
8894 conventions, so leave it as "auto". */
8895 break;
8896 case AEABI_VFP_args_compatible:
8897 /* "Code is compatible with both the base
8898 and VFP variants; the user did not permit
8899 non-variadic functions to pass FP
8900 parameters/results" - leave it as
8901 "auto". */
8902 break;
8903 default:
8904 /* Attribute value not mentioned in the
8905 November 2012 ABI, so leave it as
8906 "auto". */
8907 break;
8908 }
8909 #else
8910 fp_model = ARM_FLOAT_SOFT_VFP;
8911 #endif
8912 }
8913 break;
8914
8915 default:
8916 /* Leave it as "auto". */
8917 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8918 break;
8919 }
8920
8921 #ifdef HAVE_ELF
8922 /* Detect M-profile programs. This only works if the
8923 executable file includes build attributes; GCC does
8924 copy them to the executable, but e.g. RealView does
8925 not. */
8926 int attr_arch
8927 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8928 Tag_CPU_arch);
8929 int attr_profile
8930 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8931 Tag_CPU_arch_profile);
8932
8933 /* GCC specifies the profile for v6-M; RealView only
8934 specifies the profile for architectures starting with
8935 V7 (as opposed to architectures with a tag
8936 numerically greater than TAG_CPU_ARCH_V7). */
8937 if (!tdesc_has_registers (tdesc)
8938 && (attr_arch == TAG_CPU_ARCH_V6_M
8939 || attr_arch == TAG_CPU_ARCH_V6S_M
8940 || attr_profile == 'M'))
8941 is_m = 1;
8942 #endif
8943 }
8944
8945 if (fp_model == ARM_FLOAT_AUTO)
8946 {
8947 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
8948 {
8949 case 0:
8950 /* Leave it as "auto". Strictly speaking this case
8951 means FPA, but almost nobody uses that now, and
8952 many toolchains fail to set the appropriate bits
8953 for the floating-point model they use. */
8954 break;
8955 case EF_ARM_SOFT_FLOAT:
8956 fp_model = ARM_FLOAT_SOFT_FPA;
8957 break;
8958 case EF_ARM_VFP_FLOAT:
8959 fp_model = ARM_FLOAT_VFP;
8960 break;
8961 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
8962 fp_model = ARM_FLOAT_SOFT_VFP;
8963 break;
8964 }
8965 }
8966
8967 if (e_flags & EF_ARM_BE8)
8968 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
8969
8970 break;
8971
8972 default:
8973 /* Leave it as "auto". */
8974 break;
8975 }
8976 }
8977
8978 /* Check any target description for validity. */
8979 if (tdesc_has_registers (tdesc))
8980 {
8981 /* For most registers we require GDB's default names; but also allow
8982 the numeric names for sp / lr / pc, as a convenience. */
8983 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
8984 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
8985 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
8986
8987 const struct tdesc_feature *feature;
8988 int valid_p;
8989
8990 feature = tdesc_find_feature (tdesc,
8991 "org.gnu.gdb.arm.core");
8992 if (feature == NULL)
8993 {
8994 feature = tdesc_find_feature (tdesc,
8995 "org.gnu.gdb.arm.m-profile");
8996 if (feature == NULL)
8997 return NULL;
8998 else
8999 is_m = 1;
9000 }
9001
9002 tdesc_data = tdesc_data_alloc ();
9003
9004 valid_p = 1;
9005 for (i = 0; i < ARM_SP_REGNUM; i++)
9006 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9007 arm_register_names[i]);
9008 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9009 ARM_SP_REGNUM,
9010 arm_sp_names);
9011 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9012 ARM_LR_REGNUM,
9013 arm_lr_names);
9014 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9015 ARM_PC_REGNUM,
9016 arm_pc_names);
9017 if (is_m)
9018 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9019 ARM_PS_REGNUM, "xpsr");
9020 else
9021 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9022 ARM_PS_REGNUM, "cpsr");
9023
9024 if (!valid_p)
9025 {
9026 tdesc_data_cleanup (tdesc_data);
9027 return NULL;
9028 }
9029
9030 feature = tdesc_find_feature (tdesc,
9031 "org.gnu.gdb.arm.fpa");
9032 if (feature != NULL)
9033 {
9034 valid_p = 1;
9035 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9036 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9037 arm_register_names[i]);
9038 if (!valid_p)
9039 {
9040 tdesc_data_cleanup (tdesc_data);
9041 return NULL;
9042 }
9043 }
9044 else
9045 have_fpa_registers = 0;
9046
9047 feature = tdesc_find_feature (tdesc,
9048 "org.gnu.gdb.xscale.iwmmxt");
9049 if (feature != NULL)
9050 {
9051 static const char *const iwmmxt_names[] = {
9052 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9053 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9054 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9055 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9056 };
9057
9058 valid_p = 1;
9059 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9060 valid_p
9061 &= tdesc_numbered_register (feature, tdesc_data, i,
9062 iwmmxt_names[i - ARM_WR0_REGNUM]);
9063
9064 /* Check for the control registers, but do not fail if they
9065 are missing. */
9066 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9067 tdesc_numbered_register (feature, tdesc_data, i,
9068 iwmmxt_names[i - ARM_WR0_REGNUM]);
9069
9070 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9071 valid_p
9072 &= tdesc_numbered_register (feature, tdesc_data, i,
9073 iwmmxt_names[i - ARM_WR0_REGNUM]);
9074
9075 if (!valid_p)
9076 {
9077 tdesc_data_cleanup (tdesc_data);
9078 return NULL;
9079 }
9080
9081 have_wmmx_registers = 1;
9082 }
9083
9084 /* If we have a VFP unit, check whether the single precision registers
9085 are present. If not, then we will synthesize them as pseudo
9086 registers. */
9087 feature = tdesc_find_feature (tdesc,
9088 "org.gnu.gdb.arm.vfp");
9089 if (feature != NULL)
9090 {
9091 static const char *const vfp_double_names[] = {
9092 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9093 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9094 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9095 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9096 };
9097
9098 /* Require the double precision registers. There must be either
9099 16 or 32. */
9100 valid_p = 1;
9101 for (i = 0; i < 32; i++)
9102 {
9103 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9104 ARM_D0_REGNUM + i,
9105 vfp_double_names[i]);
9106 if (!valid_p)
9107 break;
9108 }
9109 if (!valid_p && i == 16)
9110 valid_p = 1;
9111
9112 /* Also require FPSCR. */
9113 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9114 ARM_FPSCR_REGNUM, "fpscr");
9115 if (!valid_p)
9116 {
9117 tdesc_data_cleanup (tdesc_data);
9118 return NULL;
9119 }
9120
9121 if (tdesc_unnumbered_register (feature, "s0") == 0)
9122 have_vfp_pseudos = 1;
9123
9124 vfp_register_count = i;
9125
9126 /* If we have VFP, also check for NEON. The architecture allows
9127 NEON without VFP (integer vector operations only), but GDB
9128 does not support that. */
9129 feature = tdesc_find_feature (tdesc,
9130 "org.gnu.gdb.arm.neon");
9131 if (feature != NULL)
9132 {
9133 /* NEON requires 32 double-precision registers. */
9134 if (i != 32)
9135 {
9136 tdesc_data_cleanup (tdesc_data);
9137 return NULL;
9138 }
9139
9140 /* If there are quad registers defined by the stub, use
9141 their type; otherwise (normally) provide them with
9142 the default type. */
9143 if (tdesc_unnumbered_register (feature, "q0") == 0)
9144 have_neon_pseudos = 1;
9145
9146 have_neon = 1;
9147 }
9148 }
9149 }
9150
9151 /* If there is already a candidate, use it. */
9152 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9153 best_arch != NULL;
9154 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9155 {
9156 if (arm_abi != ARM_ABI_AUTO
9157 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9158 continue;
9159
9160 if (fp_model != ARM_FLOAT_AUTO
9161 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9162 continue;
9163
9164 /* There are various other properties in tdep that we do not
9165 need to check here: those derived from a target description,
9166 since gdbarches with a different target description are
9167 automatically disqualified. */
9168
9169 /* Do check is_m, though, since it might come from the binary. */
9170 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9171 continue;
9172
9173 /* Found a match. */
9174 break;
9175 }
9176
9177 if (best_arch != NULL)
9178 {
9179 if (tdesc_data != NULL)
9180 tdesc_data_cleanup (tdesc_data);
9181 return best_arch->gdbarch;
9182 }
9183
9184 tdep = XCNEW (struct gdbarch_tdep);
9185 gdbarch = gdbarch_alloc (&info, tdep);
9186
9187 /* Record additional information about the architecture we are defining.
9188 These are gdbarch discriminators, like the OSABI. */
9189 tdep->arm_abi = arm_abi;
9190 tdep->fp_model = fp_model;
9191 tdep->is_m = is_m;
9192 tdep->have_fpa_registers = have_fpa_registers;
9193 tdep->have_wmmx_registers = have_wmmx_registers;
9194 gdb_assert (vfp_register_count == 0
9195 || vfp_register_count == 16
9196 || vfp_register_count == 32);
9197 tdep->vfp_register_count = vfp_register_count;
9198 tdep->have_vfp_pseudos = have_vfp_pseudos;
9199 tdep->have_neon_pseudos = have_neon_pseudos;
9200 tdep->have_neon = have_neon;
9201
9202 arm_register_g_packet_guesses (gdbarch);
9203
9204 /* Breakpoints. */
9205 switch (info.byte_order_for_code)
9206 {
9207 case BFD_ENDIAN_BIG:
9208 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9209 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9210 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9211 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9212
9213 break;
9214
9215 case BFD_ENDIAN_LITTLE:
9216 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9217 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9218 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9219 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9220
9221 break;
9222
9223 default:
9224 internal_error (__FILE__, __LINE__,
9225 _("arm_gdbarch_init: bad byte order for float format"));
9226 }
9227
9228 /* On ARM targets char defaults to unsigned. */
9229 set_gdbarch_char_signed (gdbarch, 0);
9230
9231 /* wchar_t is unsigned under the AAPCS. */
9232 if (tdep->arm_abi == ARM_ABI_AAPCS)
9233 set_gdbarch_wchar_signed (gdbarch, 0);
9234 else
9235 set_gdbarch_wchar_signed (gdbarch, 1);
9236
9237 /* Compute type alignment. */
9238 set_gdbarch_type_align (gdbarch, arm_type_align);
9239
9240 /* Note: for displaced stepping, this includes the breakpoint, and one word
9241 of additional scratch space. This setting isn't used for anything beside
9242 displaced stepping at present. */
9243 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9244
9245 /* This should be low enough for everything. */
9246 tdep->lowest_pc = 0x20;
9247 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9248
9249 /* The default, for both APCS and AAPCS, is to return small
9250 structures in registers. */
9251 tdep->struct_return = reg_struct_return;
9252
9253 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9254 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9255
9256 if (is_m)
9257 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9258
9259 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9260
9261 frame_base_set_default (gdbarch, &arm_normal_base);
9262
9263 /* Address manipulation. */
9264 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9265
9266 /* Advance PC across function entry code. */
9267 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9268
9269 /* Detect whether PC is at a point where the stack has been destroyed. */
9270 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9271
9272 /* Skip trampolines. */
9273 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9274
9275 /* The stack grows downward. */
9276 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9277
9278 /* Breakpoint manipulation. */
9279 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9280 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9281 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9282 arm_breakpoint_kind_from_current_state);
9283
9284 /* Information about registers, etc. */
9285 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9286 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9287 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9288 set_gdbarch_register_type (gdbarch, arm_register_type);
9289 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9290
9291 /* This "info float" is FPA-specific. Use the generic version if we
9292 do not have FPA. */
9293 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9294 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9295
9296 /* Internal <-> external register number maps. */
9297 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9298 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9299
9300 set_gdbarch_register_name (gdbarch, arm_register_name);
9301
9302 /* Returning results. */
9303 set_gdbarch_return_value (gdbarch, arm_return_value);
9304
9305 /* Disassembly. */
9306 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9307
9308 /* Minsymbol frobbing. */
9309 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9310 set_gdbarch_coff_make_msymbol_special (gdbarch,
9311 arm_coff_make_msymbol_special);
9312 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9313
9314 /* Thumb-2 IT block support. */
9315 set_gdbarch_adjust_breakpoint_address (gdbarch,
9316 arm_adjust_breakpoint_address);
9317
9318 /* Virtual tables. */
9319 set_gdbarch_vbit_in_delta (gdbarch, 1);
9320
9321 /* Hook in the ABI-specific overrides, if they have been registered. */
9322 gdbarch_init_osabi (info, gdbarch);
9323
9324 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9325
9326 /* Add some default predicates. */
9327 if (is_m)
9328 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9329 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9330 dwarf2_append_unwinders (gdbarch);
9331 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9332 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9333 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9334
9335 /* Now we have tuned the configuration, set a few final things,
9336 based on what the OS ABI has told us. */
9337
9338 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9339 binaries are always marked. */
9340 if (tdep->arm_abi == ARM_ABI_AUTO)
9341 tdep->arm_abi = ARM_ABI_APCS;
9342
9343 /* Watchpoints are not steppable. */
9344 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9345
9346 /* We used to default to FPA for generic ARM, but almost nobody
9347 uses that now, and we now provide a way for the user to force
9348 the model. So default to the most useful variant. */
9349 if (tdep->fp_model == ARM_FLOAT_AUTO)
9350 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9351
9352 if (tdep->jb_pc >= 0)
9353 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9354
9355 /* Floating point sizes and format. */
9356 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9357 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9358 {
9359 set_gdbarch_double_format
9360 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9361 set_gdbarch_long_double_format
9362 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9363 }
9364 else
9365 {
9366 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9367 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9368 }
9369
9370 if (have_vfp_pseudos)
9371 {
9372 /* NOTE: These are the only pseudo registers used by
9373 the ARM target at the moment. If more are added, a
9374 little more care in numbering will be needed. */
9375
9376 int num_pseudos = 32;
9377 if (have_neon_pseudos)
9378 num_pseudos += 16;
9379 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9380 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9381 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9382 }
9383
9384 if (tdesc_data)
9385 {
9386 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9387
9388 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9389
9390 /* Override tdesc_register_type to adjust the types of VFP
9391 registers for NEON. */
9392 set_gdbarch_register_type (gdbarch, arm_register_type);
9393 }
9394
9395 /* Add standard register aliases. We add aliases even for those
     names which are used by the current architecture - it's simpler,
9397 and does no harm, since nothing ever lists user registers. */
9398 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9399 user_reg_add (gdbarch, arm_register_aliases[i].name,
9400 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9401
9402 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9403 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9404
9405 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9406
9407 return gdbarch;
9408 }
9409
9410 static void
9411 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9412 {
9413 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9414
9415 if (tdep == NULL)
9416 return;
9417
9418 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9419 (unsigned long) tdep->lowest_pc);
9420 }
9421
#if GDB_SELF_TEST
namespace selftests
{
/* Forward declaration of the ARM process-record unit test, which is
   registered in _initialize_arm_tdep and defined later in this file.  */
static void arm_record_test (void);
}
#endif
9428
9429 void
9430 _initialize_arm_tdep (void)
9431 {
9432 long length;
9433 int i, j;
9434 char regdesc[1024], *rdptr = regdesc;
9435 size_t rest = sizeof (regdesc);
9436
9437 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9438
9439 /* Add ourselves to objfile event chain. */
9440 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9441
9442 /* Register an ELF OS ABI sniffer for ARM binaries. */
9443 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9444 bfd_target_elf_flavour,
9445 arm_elf_osabi_sniffer);
9446
9447 /* Add root prefix command for all "set arm"/"show arm" commands. */
9448 add_prefix_cmd ("arm", no_class, set_arm_command,
9449 _("Various ARM-specific commands."),
9450 &setarmcmdlist, "set arm ", 0, &setlist);
9451
9452 add_prefix_cmd ("arm", no_class, show_arm_command,
9453 _("Various ARM-specific commands."),
9454 &showarmcmdlist, "show arm ", 0, &showlist);
9455
9456
9457 arm_disassembler_options = xstrdup ("reg-names-std");
9458 const disasm_options_t *disasm_options
9459 = &disassembler_options_arm ()->options;
9460 int num_disassembly_styles = 0;
9461 for (i = 0; disasm_options->name[i] != NULL; i++)
9462 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9463 num_disassembly_styles++;
9464
9465 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9466 valid_disassembly_styles = XNEWVEC (const char *,
9467 num_disassembly_styles + 1);
9468 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9469 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9470 {
9471 size_t offset = strlen ("reg-names-");
9472 const char *style = disasm_options->name[i];
9473 valid_disassembly_styles[j++] = &style[offset];
9474 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9475 disasm_options->description[i]);
9476 rdptr += length;
9477 rest -= length;
9478 }
9479 /* Mark the end of valid options. */
9480 valid_disassembly_styles[num_disassembly_styles] = NULL;
9481
9482 /* Create the help text. */
9483 std::string helptext = string_printf ("%s%s%s",
9484 _("The valid values are:\n"),
9485 regdesc,
9486 _("The default is \"std\"."));
9487
9488 add_setshow_enum_cmd("disassembler", no_class,
9489 valid_disassembly_styles, &disassembly_style,
9490 _("Set the disassembly style."),
9491 _("Show the disassembly style."),
9492 helptext.c_str (),
9493 set_disassembly_style_sfunc,
9494 show_disassembly_style_sfunc,
9495 &setarmcmdlist, &showarmcmdlist);
9496
9497 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9498 _("Set usage of ARM 32-bit mode."),
9499 _("Show usage of ARM 32-bit mode."),
9500 _("When off, a 26-bit PC will be used."),
9501 NULL,
9502 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9503 mode is %s. */
9504 &setarmcmdlist, &showarmcmdlist);
9505
9506 /* Add a command to allow the user to force the FPU model. */
9507 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9508 _("Set the floating point type."),
9509 _("Show the floating point type."),
9510 _("auto - Determine the FP typefrom the OS-ABI.\n\
9511 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9512 fpa - FPA co-processor (GCC compiled).\n\
9513 softvfp - Software FP with pure-endian doubles.\n\
9514 vfp - VFP co-processor."),
9515 set_fp_model_sfunc, show_fp_model,
9516 &setarmcmdlist, &showarmcmdlist);
9517
9518 /* Add a command to allow the user to force the ABI. */
9519 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9520 _("Set the ABI."),
9521 _("Show the ABI."),
9522 NULL, arm_set_abi, arm_show_abi,
9523 &setarmcmdlist, &showarmcmdlist);
9524
9525 /* Add two commands to allow the user to force the assumed
9526 execution mode. */
9527 add_setshow_enum_cmd ("fallback-mode", class_support,
9528 arm_mode_strings, &arm_fallback_mode_string,
9529 _("Set the mode assumed when symbols are unavailable."),
9530 _("Show the mode assumed when symbols are unavailable."),
9531 NULL, NULL, arm_show_fallback_mode,
9532 &setarmcmdlist, &showarmcmdlist);
9533 add_setshow_enum_cmd ("force-mode", class_support,
9534 arm_mode_strings, &arm_force_mode_string,
9535 _("Set the mode assumed even when symbols are available."),
9536 _("Show the mode assumed even when symbols are available."),
9537 NULL, NULL, arm_show_force_mode,
9538 &setarmcmdlist, &showarmcmdlist);
9539
9540 /* Debugging flag. */
9541 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9542 _("Set ARM debugging."),
9543 _("Show ARM debugging."),
9544 _("When on, arm-specific debugging is enabled."),
9545 NULL,
9546 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9547 &setdebuglist, &showdebuglist);
9548
9549 #if GDB_SELF_TEST
9550 selftests::register_test ("arm-record", selftests::arm_record_test);
9551
9552 const target_desc *tdesc;
9553
9554 tdesc = arm_read_description (ARM_FP_TYPE_VFPV2);
9555 selftests::record_xml_tdesc ("arm/arm-with-vfpv2.xml", tdesc);
9556 tdesc = arm_read_description (ARM_FP_TYPE_VFPV3);
9557 selftests::record_xml_tdesc ("arm/arm-with-vfpv3.xml",tdesc);
9558 tdesc = arm_read_description (ARM_FP_TYPE_IWMMXT);
9559 selftests::record_xml_tdesc ("arm/arm-with-iwmmxt.xml", tdesc);
9560 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
9561 selftests::record_xml_tdesc ("arm/arm-with-m.xml", tdesc);
9562 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
9563 selftests::record_xml_tdesc ("arm/arm-with-m-vfp-d16.xml", tdesc);
9564 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
9565 selftests::record_xml_tdesc ("arm/arm-with-m-fpa-layout.xml", tdesc);
9566 tdesc = aarch32_read_description ();
9567 selftests::record_xml_tdesc ("arm/arm-with-neon.xml", tdesc);
9568 #endif
9569
9570 }
9571
/* ARM-reversible process record data structures.  */

/* Instruction sizes, in bytes, for the three encodings GDB records.  */
#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

/* Allocate a uint32_t array for REGS and copy LENGTH register numbers
   into it from RECORD_BUF.  Nothing is allocated when LENGTH is zero.
   NOTE: LENGTH is evaluated twice, so it must be side-effect free.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
              } \
          } \
        while (0)

/* Allocate a struct arm_mem_r array for MEMS and copy LENGTH records
   into it from RECORD_BUF, which holds (length, address) uint32_t
   pairs laid out exactly like struct arm_mem_r.  Nothing is allocated
   when LENGTH is zero.  NOTE: LENGTH is evaluated twice, so it must be
   side-effect free.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
              { \
                MEMS =  XNEWVEC (struct arm_mem_r, mem_len);  \
                memcpy(&MEMS->len, &RECORD_BUF[0], \
                       sizeof(struct arm_mem_r) * LENGTH); \
              } \
          } \
        while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9611
/* ARM memory record structure: one contiguous region of memory that a
   recorded store instruction is about to overwrite.  */
struct arm_mem_r
{
  uint32_t len;    /* Record length: number of bytes written at ADDR.  */
  uint32_t addr;   /* Memory address the store writes to.  */
};
9618
/* ARM instruction record contains opcode of current insn
   and execution state (before entry to decode_insn()),
   contains list of to-be-modified registers and
   memory blocks (on return from decode_insn()).  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;
  struct regcache *regcache;
  CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
  uint32_t arm_insn;            /* Raw instruction bits; should accommodate thumb.  */
  uint32_t cond;                /* Condition code (bits 28-31 of an ARM insn).  */
  uint32_t opcode;              /* Insn opcode, filled in by the decode handlers.  */
  uint32_t decode;              /* Insn decode bits, filled in by the decode handlers.  */
  uint32_t mem_rec_count;       /* No of mem records in ARM_MEMS.  */
  uint32_t reg_rec_count;       /* No of reg records in ARM_REGS.  */
  uint32_t *arm_regs;           /* Registers to be saved for this record.  */
  struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
} insn_decode_record;
9638
9639
/* Checks ARM SBZ and SBO mandatory fields.

   Verify that the LEN-bit field of INSN starting at 1-based bit
   position BIT_NUM satisfies a should-be-one (SBO != 0) or
   should-be-zero (SBO == 0) constraint.  Returns 1 when the constraint
   holds (or LEN is zero), 0 otherwise.

   NOTE(review): in the SBZ case, "~ones" inverts all 32 bits, so bits
   outside the extracted field become set, and "ones & sbo" is always
   zero when SBO == 0, making the loop return 0 for almost any input.
   The SBZ path therefore looks suspect -- confirm against the call
   sites before relying on it.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  /* Extracted before the LEN == 0 guard below; with LEN == 0 the range
     is degenerate, but the value is never used in that case.  */
  uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));

  if (!len)
    return 1;

  if (!sbo)
    ones = ~ones;

  /* Walk the set bits from the bottom; any zero below the highest set
     bit fails the check.  */
  while (ones)
    {
      if (!(ones & sbo))
        {
          return 0;
        }
      ones = ones >> 1;
    }
  return 1;
}
9663
/* Result codes returned by the arm_record_* instruction handlers.  */

enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Which flavor of misc store arm_record_strx is asked to record.  */

typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;

/* Instruction-set flavor of the insn being recorded.  */

typedef enum
{
  ARM_RECORD=1,
  THUMB_RECORD,
  THUMB2_RECORD
} record_type_t;
9682
9683
9684 static int
9685 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9686 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9687 {
9688
9689 struct regcache *reg_cache = arm_insn_r->regcache;
9690 ULONGEST u_regval[2]= {0};
9691
9692 uint32_t reg_src1 = 0, reg_src2 = 0;
9693 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9694
9695 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9696 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9697
9698 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9699 {
9700 /* 1) Handle misc store, immediate offset. */
9701 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9702 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9703 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9704 regcache_raw_read_unsigned (reg_cache, reg_src1,
9705 &u_regval[0]);
9706 if (ARM_PC_REGNUM == reg_src1)
9707 {
9708 /* If R15 was used as Rn, hence current PC+8. */
9709 u_regval[0] = u_regval[0] + 8;
9710 }
9711 offset_8 = (immed_high << 4) | immed_low;
9712 /* Calculate target store address. */
9713 if (14 == arm_insn_r->opcode)
9714 {
9715 tgt_mem_addr = u_regval[0] + offset_8;
9716 }
9717 else
9718 {
9719 tgt_mem_addr = u_regval[0] - offset_8;
9720 }
9721 if (ARM_RECORD_STRH == str_type)
9722 {
9723 record_buf_mem[0] = 2;
9724 record_buf_mem[1] = tgt_mem_addr;
9725 arm_insn_r->mem_rec_count = 1;
9726 }
9727 else if (ARM_RECORD_STRD == str_type)
9728 {
9729 record_buf_mem[0] = 4;
9730 record_buf_mem[1] = tgt_mem_addr;
9731 record_buf_mem[2] = 4;
9732 record_buf_mem[3] = tgt_mem_addr + 4;
9733 arm_insn_r->mem_rec_count = 2;
9734 }
9735 }
9736 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9737 {
9738 /* 2) Store, register offset. */
9739 /* Get Rm. */
9740 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9741 /* Get Rn. */
9742 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9743 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9744 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9745 if (15 == reg_src2)
9746 {
9747 /* If R15 was used as Rn, hence current PC+8. */
9748 u_regval[0] = u_regval[0] + 8;
9749 }
9750 /* Calculate target store address, Rn +/- Rm, register offset. */
9751 if (12 == arm_insn_r->opcode)
9752 {
9753 tgt_mem_addr = u_regval[0] + u_regval[1];
9754 }
9755 else
9756 {
9757 tgt_mem_addr = u_regval[1] - u_regval[0];
9758 }
9759 if (ARM_RECORD_STRH == str_type)
9760 {
9761 record_buf_mem[0] = 2;
9762 record_buf_mem[1] = tgt_mem_addr;
9763 arm_insn_r->mem_rec_count = 1;
9764 }
9765 else if (ARM_RECORD_STRD == str_type)
9766 {
9767 record_buf_mem[0] = 4;
9768 record_buf_mem[1] = tgt_mem_addr;
9769 record_buf_mem[2] = 4;
9770 record_buf_mem[3] = tgt_mem_addr + 4;
9771 arm_insn_r->mem_rec_count = 2;
9772 }
9773 }
9774 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9775 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9776 {
9777 /* 3) Store, immediate pre-indexed. */
9778 /* 5) Store, immediate post-indexed. */
9779 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9780 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9781 offset_8 = (immed_high << 4) | immed_low;
9782 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9783 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9784 /* Calculate target store address, Rn +/- Rm, register offset. */
9785 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9786 {
9787 tgt_mem_addr = u_regval[0] + offset_8;
9788 }
9789 else
9790 {
9791 tgt_mem_addr = u_regval[0] - offset_8;
9792 }
9793 if (ARM_RECORD_STRH == str_type)
9794 {
9795 record_buf_mem[0] = 2;
9796 record_buf_mem[1] = tgt_mem_addr;
9797 arm_insn_r->mem_rec_count = 1;
9798 }
9799 else if (ARM_RECORD_STRD == str_type)
9800 {
9801 record_buf_mem[0] = 4;
9802 record_buf_mem[1] = tgt_mem_addr;
9803 record_buf_mem[2] = 4;
9804 record_buf_mem[3] = tgt_mem_addr + 4;
9805 arm_insn_r->mem_rec_count = 2;
9806 }
9807 /* Record Rn also as it changes. */
9808 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9809 arm_insn_r->reg_rec_count = 1;
9810 }
9811 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9812 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9813 {
9814 /* 4) Store, register pre-indexed. */
9815 /* 6) Store, register post -indexed. */
9816 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9817 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9818 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9819 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9820 /* Calculate target store address, Rn +/- Rm, register offset. */
9821 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9822 {
9823 tgt_mem_addr = u_regval[0] + u_regval[1];
9824 }
9825 else
9826 {
9827 tgt_mem_addr = u_regval[1] - u_regval[0];
9828 }
9829 if (ARM_RECORD_STRH == str_type)
9830 {
9831 record_buf_mem[0] = 2;
9832 record_buf_mem[1] = tgt_mem_addr;
9833 arm_insn_r->mem_rec_count = 1;
9834 }
9835 else if (ARM_RECORD_STRD == str_type)
9836 {
9837 record_buf_mem[0] = 4;
9838 record_buf_mem[1] = tgt_mem_addr;
9839 record_buf_mem[2] = 4;
9840 record_buf_mem[3] = tgt_mem_addr + 4;
9841 arm_insn_r->mem_rec_count = 2;
9842 }
9843 /* Record Rn also as it changes. */
9844 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9845 arm_insn_r->reg_rec_count = 1;
9846 }
9847 return 0;
9848 }
9849
/* Handling ARM extension space insns.

   Record the effects of an ARM instruction that falls in one of the
   extension spaces: unconditional, arithmetic, control, load/store and
   coprocessor.  Register numbers and memory ranges to save are
   collected in local buffers and attached to ARM_INSN_R on success.
   Returns 0 on success, -1 when the instruction cannot be recorded
   (undefined insns, SPSR-modifying insns, coprocessor space).  */

static int
arm_record_extension_space (insn_decode_record *arm_insn_r)
{
  int ret = 0;  /* Return value: -1:record failure ;  0:success  */
  uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t reg_src1 = 0;
  struct regcache *reg_cache = arm_insn_r->regcache;
  ULONGEST u_regval = 0;

  gdb_assert (!INSN_RECORDED(arm_insn_r));
  /* Handle unconditional insn extension space.  */

  opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
  if (arm_insn_r->cond)
    {
      /* PLD has no effect on architectural state, it just affects
	 the caches.  */
      if (5 == ((opcode1 & 0xE0) >> 5))
	{
	  /* BLX(1): clobbers the status register and the link
	     register.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn.  */
    }


  opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
  if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
    {
      ret = -1;
      /* Undefined instruction on ARM V5; need to handle if later
	 versions define it.  */
    }

  opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
  insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);

  /* Handle arithmetic insn extension space.  */
  if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
      && !INSN_RECORDED(arm_insn_r))
    {
      /* Handle MLA(S) and MUL(S).  */
      if (in_inclusive_range (insn_op1, 0U, 3U))
	{
	  /* Rd (bits 12-15) and the flags are modified.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[1] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (in_inclusive_range (insn_op1, 4U, 15U))
	{
	  /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S): the long
	     result occupies RdHi (bits 16-19) and RdLo (bits 12-15).  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[2] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 3;
	}
    }

  opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
  insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);

  /* Handle control insn extension space.  */

  if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
      && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
    {
      if (!bit (arm_insn_r->arm_insn,25))
	{
	  if (!bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      if ((0 == insn_op1) || (2 == insn_op1))
		{
		  /* MRS: Rd (bits 12-15) receives the status register.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;
		}
	      else if (1 == insn_op1)
		{
		  /* CSPR is going to be changed.  */
		  record_buf[0] = ARM_PS_REGNUM;
		  arm_insn_r->reg_rec_count = 1;
		}
	      else if (3 == insn_op1)
		{
		  /* SPSR is going to be changed.  */
		  /* We need to get SPSR value, which is yet to be done.  */
		  return -1;
		}
	    }
	  else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      if (1 == insn_op1)
		{
		  /* BX.  */
		  record_buf[0] = ARM_PS_REGNUM;
		  arm_insn_r->reg_rec_count = 1;
		}
	      else if (3 == insn_op1)
		{
		  /* CLZ: writes Rd (bits 12-15).  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;
		}
	    }
	  else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      /* BLX: clobbers the status and link registers.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;
	    }
	  else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      /* QADD, QSUB, QDADD, QDSUB: saturating arithmetic;
		 modifies the Q flag and Rd (bits 12-15).  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	      arm_insn_r->reg_rec_count = 2;
	    }
	  else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      /* BKPT.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;

	      /* Save SPSR also; how?  Not supported yet.  */
	      return -1;
	    }
	  else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
		  || 10 == bits (arm_insn_r->arm_insn, 4, 7)
		  || 12 == bits (arm_insn_r->arm_insn, 4, 7)
		  || 14 == bits (arm_insn_r->arm_insn, 4, 7)
		 )
	    {
	      if (0 == insn_op1 || 1 == insn_op1)
		{
		  /* SMLA<x><y>, SMLAW<y>, SMULW<y>.  */
		  /* We dont do optimization for SMULW<y> where we
		     need only Rd.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = ARM_PS_REGNUM;
		  arm_insn_r->reg_rec_count = 2;
		}
	      else if (2 == insn_op1)
		{
		  /* SMLAL<x><y>: long accumulate, writes RdLo and RdHi.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
		  arm_insn_r->reg_rec_count = 2;
		}
	      else if (3 == insn_op1)
		{
		  /* SMUL<x><y>: writes Rd only.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;
		}
	    }
	}
      else
	{
	  /* MSR : immediate form.  */
	  if (1 == insn_op1)
	    {
	      /* CSPR is going to be changed.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      arm_insn_r->reg_rec_count = 1;
	    }
	  else if (3 == insn_op1)
	    {
	      /* SPSR is going to be changed.  */
	      /* we need to get SPSR value, which is yet to be done  */
	      return -1;
	    }
	}
    }

  opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
  insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);

  /* Handle load/store insn extension space.  */

  if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
      && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
      && !INSN_RECORDED(arm_insn_r))
    {
      /* SWP/SWPB.  */
      if (0 == insn_op1)
	{
	  /* These insn, changes register and memory as well.  */
	  /* SWP or SWPB insn.  */
	  /* Get memory address given by Rn.  */
	  reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
	  regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
	  /* SWP insn ?, swaps word.
	     NOTE(review): this reads arm_insn_r->opcode, which is not
	     set anywhere in this function -- it relies on the caller
	     having decoded it beforehand; confirm at the call site.  */
	  if (8 == arm_insn_r->opcode)
	    {
	      record_buf_mem[0] = 4;
	    }
	  else
	    {
	      /* SWPB insn, swaps only byte.  */
	      record_buf_mem[0] = 1;
	    }
	  record_buf_mem[1] = u_regval;
	  arm_insn_r->mem_rec_count = 1;
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
	{
	  /* STRH.  */
	  arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
			  ARM_RECORD_STRH);
	}
      else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
	{
	  /* LDRD: loads the even/odd register pair Rt, Rt+1.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[1] = record_buf[0] + 1;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
	{
	  /* STRD.  */
	  arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
			  ARM_RECORD_STRD);
	}
      else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
	{
	  /* LDRH, LDRSB, LDRSH: write Rt (bits 12-15) only.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}

    }

  opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
  if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
      && !INSN_RECORDED(arm_insn_r))
    {
      ret = -1;
      /* Handle coprocessor insn extension space.  */
    }

  /* To be done for ARMv5 and later; as of now we return -1.  */
  if (-1 == ret)
    return ret;

  /* Attach the collected register and memory records to the insn.  */
  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);

  return ret;
}
10112
10113 /* Handling opcode 000 insns. */
10114
/* Record handler for ARM insns with opcode bits [25:27] = 000:
   data-processing (register and register-shifted register),
   miscellaneous insns (BX/BLX/BKPT/CLZ/MRS), multiplies,
   synchronization primitives (SWP/SWPB) and the extra load/store
   encodings (LDRH/STRH/LDRD/STRD/LDRSB/LDRSH).

   Collects into ARM_INSN_R the registers (and, for stores, the memory
   ranges) the insn is about to clobber, so that "record" mode can
   undo it.  Returns 0 on success, -1 for insns that cannot be
   recorded (e.g. those touching SPSR).  */

static int
arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
{
  struct regcache *reg_cache = arm_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];
  ULONGEST u_regval[2] = {0};

  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0;

  /* Cache the major decode fields: opcode = bits [21:24],
     decode = bits [4:7], opcode1 = bits [20:24].  */
  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
  opcode1 = bits (arm_insn_r->arm_insn, 20, 24);

  if (!((opcode1 & 0x19) == 0x10))
    {
      /* Data-processing (register) and Data-processing (register-shifted
	 register */
      /* Out of 11 shifter operands mode, all the insn modifies destination
	 register, which is specified by 13-16 decode.  The condition flags
	 (CPSR) may be updated as well, so both are recorded.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[1] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 2;
    }
  else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
    {
      /* Miscellaneous instructions */

      if (3 == arm_insn_r->decode && 0x12 == opcode1
	  && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
	{
	  /* Handle BLX, branch and link/exchange.  */
	  if (9 == arm_insn_r->opcode)
	    {
	      /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
		 and R14 stores the return address.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;
	    }
	}
      else if (7 == arm_insn_r->decode && 0x12 == opcode1)
	{
	  /* Handle enhanced software breakpoint insn, BKPT.  */
	  /* CPSR is changed to be executed in ARM state, disabling normal
	     interrupts, entering abort mode.  */
	  /* According to high vector configuration PC is set.  */
	  /* user hit breakpoint and type reverse, in
	     that case, we need to go back with previous CPSR and
	     Program Counter.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  arm_insn_r->reg_rec_count = 2;

	  /* BKPT also banks the old CPSR into SPSR_abt, which we cannot
	     record yet.  */
	  /* Save SPSR also; how?  */
	  return -1;
	}
      else if (1 == arm_insn_r->decode && 0x12 == opcode1
	       && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
	{
	  /* Handle BX, branch and link/exchange.  */
	  /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (1 == arm_insn_r->decode && 0x16 == opcode1
	       && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
	       && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
	{
	  /* Count leading zeros: CLZ.  Only the destination Rd changes.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
	       && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
	       && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
	       && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
	{
	  /* Handle MRS insn: status register is copied into Rd, so only
	     Rd needs recording.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
    }
  else if (9 == arm_insn_r->decode && opcode1 < 0x10)
    {
      /* Multiply and multiply-accumulate */

      /* Handle multiply instructions.  */
      /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL.  */
      if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
	{
	  /* Handle MLA and MUL: destination Rd is in bits [16:19] for
	     these encodings, and the flags may be set.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
	{
	  /* Handle SMLAL, SMULL, UMLAL, UMULL: 64-bit result writes both
	     RdHi (bits [16:19]) and RdLo (bits [12:15]), plus flags.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[2] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 3;
	}
    }
  else if (9 == arm_insn_r->decode && opcode1 > 0x10)
    {
      /* Synchronization primitives */

      /* Handling SWP, SWPB.  */
      /* These insn, changes register and memory as well.  */
      /* SWP or SWPB insn.  */

      /* Rn (bits [16:19]) holds the memory address being swapped.  */
      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      /* SWP insn ?, swaps word.  */
      if (8 == arm_insn_r->opcode)
	{
	  record_buf_mem[0] = 4;
	}
      else
	{
	  /* SWPB insn, swaps only byte.  */
	  record_buf_mem[0] = 1;
	}
      record_buf_mem[1] = u_regval[0];
      arm_insn_r->mem_rec_count = 1;
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
	   || 15 == arm_insn_r->decode)
    {
      if ((opcode1 & 0x12) == 2)
	{
	  /* Extra load/store (unprivileged) */
	  return -1;
	}
      else
	{
	  /* Extra load/store */
	  /* Bits [5:6] select halfword/doubleword/signed variants.  */
	  switch (bits (arm_insn_r->arm_insn, 5, 6))
	    {
	    case 1:
	      if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
		{
		  /* STRH (register), STRH (immediate) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRH);
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRH (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x5)
		{
		  /* LDRH (immediate), LDRH (literal) */
		  int rn = bits (arm_insn_r->arm_insn, 16, 19);

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (rn != 15)
		    {
		      /* LDRH (immediate): the literal form (Rn == PC) has
			 no writeback, so only record Rn here.  */
		      if (bit (arm_insn_r->arm_insn, 21))
			{
			  /* Write back to Rn.  */
			  record_buf[arm_insn_r->reg_rec_count++] = rn;
			}
		    }
		}
	      else
		return -1;
	      break;
	    case 2:
	      if ((opcode1 & 0x05) == 0x0)
		{
		  /* LDRD (register): loads the pair Rt, Rt+1.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = record_buf[0] + 1;
		  arm_insn_r->reg_rec_count = 2;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRSB (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
		{
		  /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
		     LDRSB (literal) */
		  int rn = bits (arm_insn_r->arm_insn, 16, 19);

		  /* NOTE(review): only one destination register is
		     recorded here, even though the LDRD forms also load
		     Rt+1 — confirm against the register-form LDRD case
		     above, which records both.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (rn != 15)
		    {
		      /* LDRD (immediate), LDRSB (immediate) */
		      if (bit (arm_insn_r->arm_insn, 21))
			{
			  /* Write back to Rn.  */
			  record_buf[arm_insn_r->reg_rec_count++] = rn;
			}
		    }
		}
	      else
		return -1;
	      break;
	    case 3:
	      if ((opcode1 & 0x05) == 0x0)
		{
		  /* STRD (register) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRD);
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRSH (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x4)
		{
		  /* STRD (immediate) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRD);
		}
	      else if ((opcode1 & 0x05) == 0x5)
		{
		  /* LDRSH (immediate), LDRSH (literal) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else
		return -1;
	      break;
	    default:
	      return -1;
	    }
	}
    }
  else
    {
      return -1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
10405
10406 /* Handling opcode 001 insns. */
10407
10408 static int
10409 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10410 {
10411 uint32_t record_buf[8], record_buf_mem[8];
10412
10413 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10414 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10415
10416 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10417 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10418 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10419 )
10420 {
10421 /* Handle MSR insn. */
10422 if (9 == arm_insn_r->opcode)
10423 {
10424 /* CSPR is going to be changed. */
10425 record_buf[0] = ARM_PS_REGNUM;
10426 arm_insn_r->reg_rec_count = 1;
10427 }
10428 else
10429 {
10430 /* SPSR is going to be changed. */
10431 }
10432 }
10433 else if (arm_insn_r->opcode <= 15)
10434 {
10435 /* Normal data processing insns. */
10436 /* Out of 11 shifter operands mode, all the insn modifies destination
10437 register, which is specified by 13-16 decode. */
10438 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10439 record_buf[1] = ARM_PS_REGNUM;
10440 arm_insn_r->reg_rec_count = 2;
10441 }
10442 else
10443 {
10444 return -1;
10445 }
10446
10447 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10448 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10449 return 0;
10450 }
10451
10452 static int
10453 arm_record_media (insn_decode_record *arm_insn_r)
10454 {
10455 uint32_t record_buf[8];
10456
10457 switch (bits (arm_insn_r->arm_insn, 22, 24))
10458 {
10459 case 0:
10460 /* Parallel addition and subtraction, signed */
10461 case 1:
10462 /* Parallel addition and subtraction, unsigned */
10463 case 2:
10464 case 3:
10465 /* Packing, unpacking, saturation and reversal */
10466 {
10467 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10468
10469 record_buf[arm_insn_r->reg_rec_count++] = rd;
10470 }
10471 break;
10472
10473 case 4:
10474 case 5:
10475 /* Signed multiplies */
10476 {
10477 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10478 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10479
10480 record_buf[arm_insn_r->reg_rec_count++] = rd;
10481 if (op1 == 0x0)
10482 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10483 else if (op1 == 0x4)
10484 record_buf[arm_insn_r->reg_rec_count++]
10485 = bits (arm_insn_r->arm_insn, 12, 15);
10486 }
10487 break;
10488
10489 case 6:
10490 {
10491 if (bit (arm_insn_r->arm_insn, 21)
10492 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10493 {
10494 /* SBFX */
10495 record_buf[arm_insn_r->reg_rec_count++]
10496 = bits (arm_insn_r->arm_insn, 12, 15);
10497 }
10498 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10499 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10500 {
10501 /* USAD8 and USADA8 */
10502 record_buf[arm_insn_r->reg_rec_count++]
10503 = bits (arm_insn_r->arm_insn, 16, 19);
10504 }
10505 }
10506 break;
10507
10508 case 7:
10509 {
10510 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10511 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10512 {
10513 /* Permanently UNDEFINED */
10514 return -1;
10515 }
10516 else
10517 {
10518 /* BFC, BFI and UBFX */
10519 record_buf[arm_insn_r->reg_rec_count++]
10520 = bits (arm_insn_r->arm_insn, 12, 15);
10521 }
10522 }
10523 break;
10524
10525 default:
10526 return -1;
10527 }
10528
10529 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10530
10531 return 0;
10532 }
10533
10534 /* Handle ARM mode instructions with opcode 010. */
10535
/* Record handler for ARM insns with opcode 010: load/store with an
   immediate offset (LDR/LDRB/LDRBT/LDRT and STR/STRB/STRBT/STRT).
   For loads, records the destination register (plus the base on
   writeback); for stores, computes the effective address and records
   the memory range being overwritten.  Returns 0 on success.  */

static int
arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
{
  struct regcache *reg_cache = arm_insn_r->regcache;

  uint32_t reg_base, reg_dest;
  uint32_t offset_12, tgt_mem_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  unsigned char wback;
  ULONGEST u_regval;

  /* Calculate wback: post-indexed (P == 0) or pre-indexed with
     writeback (W == 1) both update the base register.  */
  wback = (bit (arm_insn_r->arm_insn, 24) == 0)
    || (bit (arm_insn_r->arm_insn, 21) == 1);

  arm_insn_r->reg_rec_count = 0;
  reg_base = bits (arm_insn_r->arm_insn, 16, 19);

  if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
	 and LDRT.  */

      reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[arm_insn_r->reg_rec_count++] = reg_dest;

      /* The LDR instruction is capable of doing branching.  If MOV LR, PC
	 preceeds a LDR instruction having R15 as reg_base, it
	 emulates a branch and link instruction, and hence we need to save
	 CPSR and PC as well.  */
      if (ARM_PC_REGNUM == reg_dest)
	record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;

      /* If wback is true, also save the base register, which is going to be
	 written to.  */
      if (wback)
	record_buf[arm_insn_r->reg_rec_count++] = reg_base;
    }
  else
    {
      /* STR (immediate), STRB (immediate), STRBT and STRT.  */

      offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
      regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);

      /* Handle bit U.  */
      if (bit (arm_insn_r->arm_insn, 23))
	{
	  /* U == 1: Add the offset.  */
	  tgt_mem_addr = (uint32_t) u_regval + offset_12;
	}
      else
	{
	  /* U == 0: subtract the offset.  */
	  tgt_mem_addr = (uint32_t) u_regval - offset_12;
	}

      /* Bit 22 tells us whether the store instruction writes 1 byte or 4
	 bytes.  */
      if (bit (arm_insn_r->arm_insn, 22))
	{
	  /* STRB and STRBT: 1 byte.  */
	  record_buf_mem[0] = 1;
	}
      else
	{
	  /* STR and STRT: 4 bytes.  */
	  record_buf_mem[0] = 4;
	}

      /* Handle bit P: for pre-indexed forms the store goes to the
	 offset address; for post-indexed forms it goes to the
	 unmodified base address.  */
      if (bit (arm_insn_r->arm_insn, 24))
	record_buf_mem[1] = tgt_mem_addr;
      else
	record_buf_mem[1] = (uint32_t) u_regval;

      arm_insn_r->mem_rec_count = 1;

      /* If wback is true, also save the base register, which is going to be
	 written to.  */
      if (wback)
	record_buf[arm_insn_r->reg_rec_count++] = reg_base;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
10624
10625 /* Handling opcode 011 insns. */
10626
10627 static int
10628 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10629 {
10630 struct regcache *reg_cache = arm_insn_r->regcache;
10631
10632 uint32_t shift_imm = 0;
10633 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10634 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10635 uint32_t record_buf[8], record_buf_mem[8];
10636
10637 LONGEST s_word;
10638 ULONGEST u_regval[2];
10639
10640 if (bit (arm_insn_r->arm_insn, 4))
10641 return arm_record_media (arm_insn_r);
10642
10643 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10644 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10645
10646 /* Handle enhanced store insns and LDRD DSP insn,
10647 order begins according to addressing modes for store insns
10648 STRH insn. */
10649
10650 /* LDR or STR? */
10651 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10652 {
10653 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10654 /* LDR insn has a capability to do branching, if
10655 MOV LR, PC is precedded by LDR insn having Rn as R15
10656 in that case, it emulates branch and link insn, and hence we
10657 need to save CSPR and PC as well. */
10658 if (15 != reg_dest)
10659 {
10660 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10661 arm_insn_r->reg_rec_count = 1;
10662 }
10663 else
10664 {
10665 record_buf[0] = reg_dest;
10666 record_buf[1] = ARM_PS_REGNUM;
10667 arm_insn_r->reg_rec_count = 2;
10668 }
10669 }
10670 else
10671 {
10672 if (! bits (arm_insn_r->arm_insn, 4, 11))
10673 {
10674 /* Store insn, register offset and register pre-indexed,
10675 register post-indexed. */
10676 /* Get Rm. */
10677 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10678 /* Get Rn. */
10679 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10680 regcache_raw_read_unsigned (reg_cache, reg_src1
10681 , &u_regval[0]);
10682 regcache_raw_read_unsigned (reg_cache, reg_src2
10683 , &u_regval[1]);
10684 if (15 == reg_src2)
10685 {
10686 /* If R15 was used as Rn, hence current PC+8. */
10687 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10688 u_regval[0] = u_regval[0] + 8;
10689 }
10690 /* Calculate target store address, Rn +/- Rm, register offset. */
10691 /* U == 1. */
10692 if (bit (arm_insn_r->arm_insn, 23))
10693 {
10694 tgt_mem_addr = u_regval[0] + u_regval[1];
10695 }
10696 else
10697 {
10698 tgt_mem_addr = u_regval[1] - u_regval[0];
10699 }
10700
10701 switch (arm_insn_r->opcode)
10702 {
10703 /* STR. */
10704 case 8:
10705 case 12:
10706 /* STR. */
10707 case 9:
10708 case 13:
10709 /* STRT. */
10710 case 1:
10711 case 5:
10712 /* STR. */
10713 case 0:
10714 case 4:
10715 record_buf_mem[0] = 4;
10716 break;
10717
10718 /* STRB. */
10719 case 10:
10720 case 14:
10721 /* STRB. */
10722 case 11:
10723 case 15:
10724 /* STRBT. */
10725 case 3:
10726 case 7:
10727 /* STRB. */
10728 case 2:
10729 case 6:
10730 record_buf_mem[0] = 1;
10731 break;
10732
10733 default:
10734 gdb_assert_not_reached ("no decoding pattern found");
10735 break;
10736 }
10737 record_buf_mem[1] = tgt_mem_addr;
10738 arm_insn_r->mem_rec_count = 1;
10739
10740 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10741 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10742 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10743 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10744 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10745 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10746 )
10747 {
10748 /* Rn is going to be changed in pre-indexed mode and
10749 post-indexed mode as well. */
10750 record_buf[0] = reg_src2;
10751 arm_insn_r->reg_rec_count = 1;
10752 }
10753 }
10754 else
10755 {
10756 /* Store insn, scaled register offset; scaled pre-indexed. */
10757 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10758 /* Get Rm. */
10759 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10760 /* Get Rn. */
10761 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10762 /* Get shift_imm. */
10763 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10764 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10765 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10766 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10767 /* Offset_12 used as shift. */
10768 switch (offset_12)
10769 {
10770 case 0:
10771 /* Offset_12 used as index. */
10772 offset_12 = u_regval[0] << shift_imm;
10773 break;
10774
10775 case 1:
10776 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10777 break;
10778
10779 case 2:
10780 if (!shift_imm)
10781 {
10782 if (bit (u_regval[0], 31))
10783 {
10784 offset_12 = 0xFFFFFFFF;
10785 }
10786 else
10787 {
10788 offset_12 = 0;
10789 }
10790 }
10791 else
10792 {
10793 /* This is arithmetic shift. */
10794 offset_12 = s_word >> shift_imm;
10795 }
10796 break;
10797
10798 case 3:
10799 if (!shift_imm)
10800 {
10801 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10802 &u_regval[1]);
10803 /* Get C flag value and shift it by 31. */
10804 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10805 | (u_regval[0]) >> 1);
10806 }
10807 else
10808 {
10809 offset_12 = (u_regval[0] >> shift_imm) \
10810 | (u_regval[0] <<
10811 (sizeof(uint32_t) - shift_imm));
10812 }
10813 break;
10814
10815 default:
10816 gdb_assert_not_reached ("no decoding pattern found");
10817 break;
10818 }
10819
10820 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10821 /* bit U set. */
10822 if (bit (arm_insn_r->arm_insn, 23))
10823 {
10824 tgt_mem_addr = u_regval[1] + offset_12;
10825 }
10826 else
10827 {
10828 tgt_mem_addr = u_regval[1] - offset_12;
10829 }
10830
10831 switch (arm_insn_r->opcode)
10832 {
10833 /* STR. */
10834 case 8:
10835 case 12:
10836 /* STR. */
10837 case 9:
10838 case 13:
10839 /* STRT. */
10840 case 1:
10841 case 5:
10842 /* STR. */
10843 case 0:
10844 case 4:
10845 record_buf_mem[0] = 4;
10846 break;
10847
10848 /* STRB. */
10849 case 10:
10850 case 14:
10851 /* STRB. */
10852 case 11:
10853 case 15:
10854 /* STRBT. */
10855 case 3:
10856 case 7:
10857 /* STRB. */
10858 case 2:
10859 case 6:
10860 record_buf_mem[0] = 1;
10861 break;
10862
10863 default:
10864 gdb_assert_not_reached ("no decoding pattern found");
10865 break;
10866 }
10867 record_buf_mem[1] = tgt_mem_addr;
10868 arm_insn_r->mem_rec_count = 1;
10869
10870 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10871 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10872 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10873 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10874 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10875 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10876 )
10877 {
10878 /* Rn is going to be changed in register scaled pre-indexed
10879 mode,and scaled post indexed mode. */
10880 record_buf[0] = reg_src2;
10881 arm_insn_r->reg_rec_count = 1;
10882 }
10883 }
10884 }
10885
10886 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10887 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10888 return 0;
10889 }
10890
10891 /* Handle ARM mode instructions with opcode 100. */
10892
10893 static int
10894 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10895 {
10896 struct regcache *reg_cache = arm_insn_r->regcache;
10897 uint32_t register_count = 0, register_bits;
10898 uint32_t reg_base, addr_mode;
10899 uint32_t record_buf[24], record_buf_mem[48];
10900 uint32_t wback;
10901 ULONGEST u_regval;
10902
10903 /* Fetch the list of registers. */
10904 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10905 arm_insn_r->reg_rec_count = 0;
10906
10907 /* Fetch the base register that contains the address we are loading data
10908 to. */
10909 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10910
10911 /* Calculate wback. */
10912 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10913
10914 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10915 {
10916 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10917
10918 /* Find out which registers are going to be loaded from memory. */
10919 while (register_bits)
10920 {
10921 if (register_bits & 0x00000001)
10922 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10923 register_bits = register_bits >> 1;
10924 register_count++;
10925 }
10926
10927
10928 /* If wback is true, also save the base register, which is going to be
10929 written to. */
10930 if (wback)
10931 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10932
10933 /* Save the CPSR register. */
10934 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10935 }
10936 else
10937 {
10938 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10939
10940 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10941
10942 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10943
10944 /* Find out how many registers are going to be stored to memory. */
10945 while (register_bits)
10946 {
10947 if (register_bits & 0x00000001)
10948 register_count++;
10949 register_bits = register_bits >> 1;
10950 }
10951
10952 switch (addr_mode)
10953 {
10954 /* STMDA (STMED): Decrement after. */
10955 case 0:
10956 record_buf_mem[1] = (uint32_t) u_regval
10957 - register_count * ARM_INT_REGISTER_SIZE + 4;
10958 break;
10959 /* STM (STMIA, STMEA): Increment after. */
10960 case 1:
10961 record_buf_mem[1] = (uint32_t) u_regval;
10962 break;
10963 /* STMDB (STMFD): Decrement before. */
10964 case 2:
10965 record_buf_mem[1] = (uint32_t) u_regval
10966 - register_count * ARM_INT_REGISTER_SIZE;
10967 break;
10968 /* STMIB (STMFA): Increment before. */
10969 case 3:
10970 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
10971 break;
10972 default:
10973 gdb_assert_not_reached ("no decoding pattern found");
10974 break;
10975 }
10976
10977 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
10978 arm_insn_r->mem_rec_count = 1;
10979
10980 /* If wback is true, also save the base register, which is going to be
10981 written to. */
10982 if (wback)
10983 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10984 }
10985
10986 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10987 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10988 return 0;
10989 }
10990
10991 /* Handling opcode 101 insns. */
10992
10993 static int
10994 arm_record_b_bl (insn_decode_record *arm_insn_r)
10995 {
10996 uint32_t record_buf[8];
10997
10998 /* Handle B, BL, BLX(1) insns. */
10999 /* B simply branches so we do nothing here. */
11000 /* Note: BLX(1) doesnt fall here but instead it falls into
11001 extension space. */
11002 if (bit (arm_insn_r->arm_insn, 24))
11003 {
11004 record_buf[0] = ARM_LR_REGNUM;
11005 arm_insn_r->reg_rec_count = 1;
11006 }
11007
11008 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11009
11010 return 0;
11011 }
11012
11013 static int
11014 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11015 {
11016 printf_unfiltered (_("Process record does not support instruction "
11017 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11018 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11019
11020 return -1;
11021 }
11022
11023 /* Record handler for vector data transfer instructions. */
11024
/* Record handler for vector data transfer instructions (VMOV between
   core and VFP/NEON registers, VMRS, VMSR, VDUP).  Bits L (20) and
   C (8) select the transfer direction and class; bits A (21:23)
   refine the encoding.  Returns 0 on success.  */

static int
arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
{
  uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
  uint32_t record_buf[4];

  reg_t = bits (arm_insn_r->arm_insn, 12, 15);
  /* NOTE(review): reg_v and bits_a both read bits [21:23]; a vector
     register field would more typically live in bits [16:19] —
     confirm against the ARM ARM before relying on reg_v here.  */
  reg_v = bits (arm_insn_r->arm_insn, 21, 23);
  bits_a = bits (arm_insn_r->arm_insn, 21, 23);
  bit_l = bit (arm_insn_r->arm_insn, 20);
  bit_c = bit (arm_insn_r->arm_insn, 8);

  /* Handle VMOV instruction.  */
  if (bit_l && bit_c)
    {
      record_buf[0] = reg_t;
      arm_insn_r->reg_rec_count = 1;
    }
  else if (bit_l && !bit_c)
    {
      /* Handle VMOV instruction.  */
      if (bits_a == 0x00)
	{
	  record_buf[0] = reg_t;
	  arm_insn_r->reg_rec_count = 1;
	}
      /* Handle VMRS instruction.  */
      else if (bits_a == 0x07)
	{
	  /* VMRS with Rt == 15 transfers the FPSCR flags into the
	     CPSR condition flags, so record CPSR instead of R15.  */
	  if (reg_t == 15)
	    reg_t = ARM_PS_REGNUM;

	  record_buf[0] = reg_t;
	  arm_insn_r->reg_rec_count = 1;
	}
    }
  else if (!bit_l && !bit_c)
    {
      /* Handle VMOV instruction.  */
      if (bits_a == 0x00)
	{
	  record_buf[0] = ARM_D0_REGNUM + reg_v;

	  arm_insn_r->reg_rec_count = 1;
	}
      /* Handle VMSR instruction.  */
      else if (bits_a == 0x07)
	{
	  record_buf[0] = ARM_FPSCR_REGNUM;
	  arm_insn_r->reg_rec_count = 1;
	}
    }
  else if (!bit_l && bit_c)
    {
      /* Handle VMOV instruction: core register into a D register,
	 with bit 7 extending the register number.  */
      if (!(bits_a & 0x04))
	{
	  record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
			  + ARM_D0_REGNUM;
	  arm_insn_r->reg_rec_count = 1;
	}
      /* Handle VDUP instruction.  */
      else
	{
	  /* Bit 21 distinguishes quad (two D registers) from double
	     (one D register) destinations.  */
	  if (bit (arm_insn_r->arm_insn, 21))
	    {
	      reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
	      record_buf[0] = reg_v + ARM_D0_REGNUM;
	      record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
	      arm_insn_r->reg_rec_count = 2;
	    }
	  else
	    {
	      reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
	      record_buf[0] = reg_v + ARM_D0_REGNUM;
	      arm_insn_r->reg_rec_count = 1;
	    }
	}
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  return 0;
}
11108
11109 /* Record handler for extension register load/store instructions. */
11110
11111 static int
11112 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11113 {
11114 uint32_t opcode, single_reg;
11115 uint8_t op_vldm_vstm;
11116 uint32_t record_buf[8], record_buf_mem[128];
11117 ULONGEST u_regval = 0;
11118
11119 struct regcache *reg_cache = arm_insn_r->regcache;
11120
11121 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11122 single_reg = !bit (arm_insn_r->arm_insn, 8);
11123 op_vldm_vstm = opcode & 0x1b;
11124
11125 /* Handle VMOV instructions. */
11126 if ((opcode & 0x1e) == 0x04)
11127 {
11128 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11129 {
11130 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11131 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11132 arm_insn_r->reg_rec_count = 2;
11133 }
11134 else
11135 {
11136 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11137 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11138
11139 if (single_reg)
11140 {
11141 /* The first S register number m is REG_M:M (M is bit 5),
11142 the corresponding D register number is REG_M:M / 2, which
11143 is REG_M. */
11144 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11145 /* The second S register number is REG_M:M + 1, the
11146 corresponding D register number is (REG_M:M + 1) / 2.
11147 IOW, if bit M is 1, the first and second S registers
11148 are mapped to different D registers, otherwise, they are
11149 in the same D register. */
11150 if (bit_m)
11151 {
11152 record_buf[arm_insn_r->reg_rec_count++]
11153 = ARM_D0_REGNUM + reg_m + 1;
11154 }
11155 }
11156 else
11157 {
11158 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11159 arm_insn_r->reg_rec_count = 1;
11160 }
11161 }
11162 }
11163 /* Handle VSTM and VPUSH instructions. */
11164 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11165 || op_vldm_vstm == 0x12)
11166 {
11167 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11168 uint32_t memory_index = 0;
11169
11170 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11171 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11172 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11173 imm_off32 = imm_off8 << 2;
11174 memory_count = imm_off8;
11175
11176 if (bit (arm_insn_r->arm_insn, 23))
11177 start_address = u_regval;
11178 else
11179 start_address = u_regval - imm_off32;
11180
11181 if (bit (arm_insn_r->arm_insn, 21))
11182 {
11183 record_buf[0] = reg_rn;
11184 arm_insn_r->reg_rec_count = 1;
11185 }
11186
11187 while (memory_count > 0)
11188 {
11189 if (single_reg)
11190 {
11191 record_buf_mem[memory_index] = 4;
11192 record_buf_mem[memory_index + 1] = start_address;
11193 start_address = start_address + 4;
11194 memory_index = memory_index + 2;
11195 }
11196 else
11197 {
11198 record_buf_mem[memory_index] = 4;
11199 record_buf_mem[memory_index + 1] = start_address;
11200 record_buf_mem[memory_index + 2] = 4;
11201 record_buf_mem[memory_index + 3] = start_address + 4;
11202 start_address = start_address + 8;
11203 memory_index = memory_index + 4;
11204 }
11205 memory_count--;
11206 }
11207 arm_insn_r->mem_rec_count = (memory_index >> 1);
11208 }
11209 /* Handle VLDM instructions. */
11210 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11211 || op_vldm_vstm == 0x13)
11212 {
11213 uint32_t reg_count, reg_vd;
11214 uint32_t reg_index = 0;
11215 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11216
11217 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11218 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11219
11220 /* REG_VD is the first D register number. If the instruction
11221 loads memory to S registers (SINGLE_REG is TRUE), the register
11222 number is (REG_VD << 1 | bit D), so the corresponding D
11223 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11224 if (!single_reg)
11225 reg_vd = reg_vd | (bit_d << 4);
11226
11227 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11228 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11229
11230 /* If the instruction loads memory to D register, REG_COUNT should
11231 be divided by 2, according to the ARM Architecture Reference
11232 Manual. If the instruction loads memory to S register, divide by
11233 2 as well because two S registers are mapped to D register. */
11234 reg_count = reg_count / 2;
11235 if (single_reg && bit_d)
11236 {
11237 /* Increase the register count if S register list starts from
11238 an odd number (bit d is one). */
11239 reg_count++;
11240 }
11241
11242 while (reg_count > 0)
11243 {
11244 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11245 reg_count--;
11246 }
11247 arm_insn_r->reg_rec_count = reg_index;
11248 }
11249 /* VSTR Vector store register. */
11250 else if ((opcode & 0x13) == 0x10)
11251 {
11252 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11253 uint32_t memory_index = 0;
11254
11255 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11256 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11257 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11258 imm_off32 = imm_off8 << 2;
11259
11260 if (bit (arm_insn_r->arm_insn, 23))
11261 start_address = u_regval + imm_off32;
11262 else
11263 start_address = u_regval - imm_off32;
11264
11265 if (single_reg)
11266 {
11267 record_buf_mem[memory_index] = 4;
11268 record_buf_mem[memory_index + 1] = start_address;
11269 arm_insn_r->mem_rec_count = 1;
11270 }
11271 else
11272 {
11273 record_buf_mem[memory_index] = 4;
11274 record_buf_mem[memory_index + 1] = start_address;
11275 record_buf_mem[memory_index + 2] = 4;
11276 record_buf_mem[memory_index + 3] = start_address + 4;
11277 arm_insn_r->mem_rec_count = 2;
11278 }
11279 }
11280 /* VLDR Vector load register. */
11281 else if ((opcode & 0x13) == 0x11)
11282 {
11283 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11284
11285 if (!single_reg)
11286 {
11287 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11288 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11289 }
11290 else
11291 {
11292 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11293 /* Record register D rather than pseudo register S. */
11294 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11295 }
11296 arm_insn_r->reg_rec_count = 1;
11297 }
11298
11299 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11300 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11301 return 0;
11302 }
11303
/* Record handler for arm/thumb mode VFP data processing instructions.
   Classifies the instruction into one of four record "shapes"
   (INSN_T0..INSN_T3, see the switch at the bottom) based on the opc1/opc2/
   opc3 fields and the sz bit, then records the destination register(s):
     INSN_T0 - two consecutive D registers are clobbered;
     INSN_T1 - one D register (with the D bit as bit 4 of the number);
     INSN_T2 - one single-precision destination, recorded via its D register;
     INSN_T3 - only FPSCR changes (compare instructions).
   Returns 0 on success; asserts if the instruction does not decode.  */

static int
arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
{
  uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
  uint32_t record_buf[4];
  enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
  enum insn_types curr_insn_type = INSN_INV;

  reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
  opc1 = bits (arm_insn_r->arm_insn, 20, 23);
  opc2 = bits (arm_insn_r->arm_insn, 16, 19);
  opc3 = bits (arm_insn_r->arm_insn, 6, 7);
  dp_op_sz = bit (arm_insn_r->arm_insn, 8);   /* sz: 1 = double precision.  */
  bit_d = bit (arm_insn_r->arm_insn, 22);
  /* Mask off the "D" bit.  */
  opc1 = opc1 & ~0x04;

  /* Handle VMLA, VMLS.  */
  if (opc1 == 0x00)
    {
      if (bit (arm_insn_r->arm_insn, 10))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VNMLA, VNMLS, VNMUL.  */
  else if (opc1 == 0x01)
    {
      if (dp_op_sz)
	curr_insn_type = INSN_T1;
      else
	curr_insn_type = INSN_T2;
    }
  /* Handle VMUL.  */
  else if (opc1 == 0x02 && !(opc3 & 0x01))
    {
      if (bit (arm_insn_r->arm_insn, 10))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VADD, VSUB.  */
  else if (opc1 == 0x03)
    {
      if (!bit (arm_insn_r->arm_insn, 9))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VDIV.  */
  else if (opc1 == 0x08)
    {
      if (dp_op_sz)
	curr_insn_type = INSN_T1;
      else
	curr_insn_type = INSN_T2;
    }
  /* Handle all other vfp data processing instructions.  */
  else if (opc1 == 0x0b)
    {
      /* Handle VMOV.  */
      if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
	{
	  if (bit (arm_insn_r->arm_insn, 4))
	    {
	      if (bit (arm_insn_r->arm_insn, 6))
		curr_insn_type = INSN_T0;
	      else
		curr_insn_type = INSN_T1;
	    }
	  else
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	}
      /* Handle VNEG and VABS.  */
      else if ((opc2 == 0x01 && opc3 == 0x01)
	       || (opc2 == 0x00 && opc3 == 0x03))
	{
	  if (!bit (arm_insn_r->arm_insn, 11))
	    {
	      if (bit (arm_insn_r->arm_insn, 6))
		curr_insn_type = INSN_T0;
	      else
		curr_insn_type = INSN_T1;
	    }
	  else
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	}
      /* Handle VSQRT.  */
      else if (opc2 == 0x01 && opc3 == 0x03)
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
      /* Handle VCVT.  NOTE(review): the sz test is inverted relative to the
	 cases above; for a double<->single conversion the destination size is
	 the opposite of sz, which is presumably the intent -- confirm.  */
      else if (opc2 == 0x07 && opc3 == 0x03)
	{
	  if (!dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
      else if (opc3 & 0x01)
	{
	  /* Handle VCVT.  */
	  if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
	    {
	      if (!bit (arm_insn_r->arm_insn, 18))
		curr_insn_type = INSN_T2;
	      else
		{
		  if (dp_op_sz)
		    curr_insn_type = INSN_T1;
		  else
		    curr_insn_type = INSN_T2;
		}
	    }
	  /* Handle VCVT.  */
	  else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	  /* Handle VCVTB, VCVTT.  */
	  else if ((opc2 & 0x0e) == 0x02)
	    curr_insn_type = INSN_T2;
	  /* Handle VCMP, VCMPE.  */
	  else if ((opc2 & 0x0e) == 0x04)
	    curr_insn_type = INSN_T3;
	}
    }

  switch (curr_insn_type)
    {
    case INSN_T0:
      /* Two consecutive D registers starting at D:Vd.  */
      reg_vd = reg_vd | (bit_d << 4);
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
      arm_insn_r->reg_rec_count = 2;
      break;

    case INSN_T1:
      /* Single double-precision destination D:Vd.  */
      reg_vd = reg_vd | (bit_d << 4);
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    case INSN_T2:
      /* Single-precision destination; S register number is Vd:D.
	 NOTE(review): unlike the VLDR record handler, the S number is not
	 divided by 2 before being added to ARM_D0_REGNUM -- verify that the
	 resulting register number is the intended one.  */
      reg_vd = (reg_vd << 1) | bit_d;
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    case INSN_T3:
      /* Compare instructions only update the status flags.  */
      record_buf[0] = ARM_FPSCR_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    default:
      gdb_assert_not_reached ("no decoding pattern found");
      break;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  return 0;
}
11515
11516 /* Handling opcode 110 insns. */
11517
11518 static int
11519 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11520 {
11521 uint32_t op1, op1_ebit, coproc;
11522
11523 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11524 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11525 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11526
11527 if ((coproc & 0x0e) == 0x0a)
11528 {
11529 /* Handle extension register ld/st instructions. */
11530 if (!(op1 & 0x20))
11531 return arm_record_exreg_ld_st_insn (arm_insn_r);
11532
11533 /* 64-bit transfers between arm core and extension registers. */
11534 if ((op1 & 0x3e) == 0x04)
11535 return arm_record_exreg_ld_st_insn (arm_insn_r);
11536 }
11537 else
11538 {
11539 /* Handle coprocessor ld/st instructions. */
11540 if (!(op1 & 0x3a))
11541 {
11542 /* Store. */
11543 if (!op1_ebit)
11544 return arm_record_unsupported_insn (arm_insn_r);
11545 else
11546 /* Load. */
11547 return arm_record_unsupported_insn (arm_insn_r);
11548 }
11549
11550 /* Move to coprocessor from two arm core registers. */
11551 if (op1 == 0x4)
11552 return arm_record_unsupported_insn (arm_insn_r);
11553
11554 /* Move to two arm core registers from coprocessor. */
11555 if (op1 == 0x5)
11556 {
11557 uint32_t reg_t[2];
11558
11559 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11560 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11561 arm_insn_r->reg_rec_count = 2;
11562
11563 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11564 return 0;
11565 }
11566 }
11567 return arm_record_unsupported_insn (arm_insn_r);
11568 }
11569
/* Handling opcode 111 insns.  Covers SWI/SVC system calls, coprocessor /
   VFP register transfers and data processing, and the MRRC/MCRR/LDC/STC
   family.  Returns 0 on success, -1 for unsupported encodings, or the
   result of the OS-specific syscall record hook.  */

static int
arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
{
  uint32_t op, op1_ebit, coproc, bits_24_25;
  struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
  struct regcache *reg_cache = arm_insn_r->regcache;

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
  coproc = bits (arm_insn_r->arm_insn, 8, 11);
  op1_ebit = bit (arm_insn_r->arm_insn, 20);
  op = bit (arm_insn_r->arm_insn, 4);
  bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);

  /* Handle arm SWI/SVC system call instructions.  */
  if (bits_24_25 == 0x3)
    {
      if (tdep->arm_syscall_record != NULL)
	{
	  ULONGEST svc_operand, svc_number;

	  /* The 24-bit immediate is the SVC comment field.  */
	  svc_operand = (0x00ffffff & arm_insn_r->arm_insn);

	  if (svc_operand)  /* OABI.  */
	    svc_number = svc_operand - 0x900000;
	  else /* EABI.  */
	    /* EABI passes the syscall number in r7.  */
	    regcache_raw_read_unsigned (reg_cache, 7, &svc_number);

	  return tdep->arm_syscall_record (reg_cache, svc_number);
	}
      else
	{
	  printf_unfiltered (_("no syscall record support\n"));
	  return -1;
	}
    }
  else if (bits_24_25 == 0x02)
    {
      /* Coprocessor data processing / register transfer (single word).  */
      if (op)
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* 8, 16, and 32-bit transfer */
	      return arm_record_vdata_transfer_insn (arm_insn_r);
	    }
	  else
	    {
	      if (op1_ebit)
		{
		  /* MRC, MRC2 */
		  uint32_t record_buf[1];

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  /* Rt == 15 means the flags (APSR) are the destination.  */
		  if (record_buf[0] == 15)
		    record_buf[0] = ARM_PS_REGNUM;

		  arm_insn_r->reg_rec_count = 1;
		  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
			     record_buf);
		  return 0;
		}
	      else
		{
		  /* MCR, MCR2 */
		  return -1;
		}
	    }
	}
      else
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* VFP data-processing instructions.  */
	      return arm_record_vfp_data_proc_insn (arm_insn_r);
	    }
	  else
	    {
	      /* CDP, CDP2 */
	      return -1;
	    }
	}
    }
  else
    {
      unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);

      if (op1 == 5)
	{
	  if ((coproc & 0x0e) != 0x0a)
	    {
	      /* MRRC, MRRC2 */
	      return -1;
	    }
	  /* Otherwise fall through to the final return -1 below.  */
	}
      /* NOTE(review): op1 == 5 here is unreachable -- the branch above
	 already matched it regardless of COPROC.  */
      else if (op1 == 4 || op1 == 5)
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* 64-bit transfers between ARM core and extension */
	      return -1;
	    }
	  else if (op1 == 4)
	    {
	      /* MCRR, MCRR2 */
	      return -1;
	    }
	}
      else if (op1 == 0 || op1 == 1)
	{
	  /* UNDEFINED */
	  return -1;
	}
      else
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* Extension register load/store */
	    }
	  else
	    {
	      /* STC, STC2, LDC, LDC2 */
	    }
	  return -1;
	}
    }

  return -1;
}
11699
11700 /* Handling opcode 000 insns. */
11701
11702 static int
11703 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11704 {
11705 uint32_t record_buf[8];
11706 uint32_t reg_src1 = 0;
11707
11708 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11709
11710 record_buf[0] = ARM_PS_REGNUM;
11711 record_buf[1] = reg_src1;
11712 thumb_insn_r->reg_rec_count = 2;
11713
11714 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11715
11716 return 0;
11717 }
11718
11719
11720 /* Handling opcode 001 insns. */
11721
11722 static int
11723 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11724 {
11725 uint32_t record_buf[8];
11726 uint32_t reg_src1 = 0;
11727
11728 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11729
11730 record_buf[0] = ARM_PS_REGNUM;
11731 record_buf[1] = reg_src1;
11732 thumb_insn_r->reg_rec_count = 2;
11733
11734 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11735
11736 return 0;
11737 }
11738
11739 /* Handling opcode 010 insns. */
11740
11741 static int
11742 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11743 {
11744 struct regcache *reg_cache = thumb_insn_r->regcache;
11745 uint32_t record_buf[8], record_buf_mem[8];
11746
11747 uint32_t reg_src1 = 0, reg_src2 = 0;
11748 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11749
11750 ULONGEST u_regval[2] = {0};
11751
11752 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11753
11754 if (bit (thumb_insn_r->arm_insn, 12))
11755 {
11756 /* Handle load/store register offset. */
11757 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11758
11759 if (in_inclusive_range (opB, 4U, 7U))
11760 {
11761 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11762 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11763 record_buf[0] = reg_src1;
11764 thumb_insn_r->reg_rec_count = 1;
11765 }
11766 else if (in_inclusive_range (opB, 0U, 2U))
11767 {
11768 /* STR(2), STRB(2), STRH(2) . */
11769 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11770 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11771 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11772 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11773 if (0 == opB)
11774 record_buf_mem[0] = 4; /* STR (2). */
11775 else if (2 == opB)
11776 record_buf_mem[0] = 1; /* STRB (2). */
11777 else if (1 == opB)
11778 record_buf_mem[0] = 2; /* STRH (2). */
11779 record_buf_mem[1] = u_regval[0] + u_regval[1];
11780 thumb_insn_r->mem_rec_count = 1;
11781 }
11782 }
11783 else if (bit (thumb_insn_r->arm_insn, 11))
11784 {
11785 /* Handle load from literal pool. */
11786 /* LDR(3). */
11787 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11788 record_buf[0] = reg_src1;
11789 thumb_insn_r->reg_rec_count = 1;
11790 }
11791 else if (opcode1)
11792 {
11793 /* Special data instructions and branch and exchange */
11794 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11795 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11796 if ((3 == opcode2) && (!opcode3))
11797 {
11798 /* Branch with exchange. */
11799 record_buf[0] = ARM_PS_REGNUM;
11800 thumb_insn_r->reg_rec_count = 1;
11801 }
11802 else
11803 {
11804 /* Format 8; special data processing insns. */
11805 record_buf[0] = ARM_PS_REGNUM;
11806 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11807 | bits (thumb_insn_r->arm_insn, 0, 2));
11808 thumb_insn_r->reg_rec_count = 2;
11809 }
11810 }
11811 else
11812 {
11813 /* Format 5; data processing insns. */
11814 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11815 if (bit (thumb_insn_r->arm_insn, 7))
11816 {
11817 reg_src1 = reg_src1 + 8;
11818 }
11819 record_buf[0] = ARM_PS_REGNUM;
11820 record_buf[1] = reg_src1;
11821 thumb_insn_r->reg_rec_count = 2;
11822 }
11823
11824 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11825 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11826 record_buf_mem);
11827
11828 return 0;
11829 }
11830
11831 /* Handling opcode 001 insns. */
11832
11833 static int
11834 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11835 {
11836 struct regcache *reg_cache = thumb_insn_r->regcache;
11837 uint32_t record_buf[8], record_buf_mem[8];
11838
11839 uint32_t reg_src1 = 0;
11840 uint32_t opcode = 0, immed_5 = 0;
11841
11842 ULONGEST u_regval = 0;
11843
11844 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11845
11846 if (opcode)
11847 {
11848 /* LDR(1). */
11849 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11850 record_buf[0] = reg_src1;
11851 thumb_insn_r->reg_rec_count = 1;
11852 }
11853 else
11854 {
11855 /* STR(1). */
11856 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11857 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11858 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11859 record_buf_mem[0] = 4;
11860 record_buf_mem[1] = u_regval + (immed_5 * 4);
11861 thumb_insn_r->mem_rec_count = 1;
11862 }
11863
11864 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11865 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11866 record_buf_mem);
11867
11868 return 0;
11869 }
11870
11871 /* Handling opcode 100 insns. */
11872
11873 static int
11874 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11875 {
11876 struct regcache *reg_cache = thumb_insn_r->regcache;
11877 uint32_t record_buf[8], record_buf_mem[8];
11878
11879 uint32_t reg_src1 = 0;
11880 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11881
11882 ULONGEST u_regval = 0;
11883
11884 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11885
11886 if (3 == opcode)
11887 {
11888 /* LDR(4). */
11889 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11890 record_buf[0] = reg_src1;
11891 thumb_insn_r->reg_rec_count = 1;
11892 }
11893 else if (1 == opcode)
11894 {
11895 /* LDRH(1). */
11896 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11897 record_buf[0] = reg_src1;
11898 thumb_insn_r->reg_rec_count = 1;
11899 }
11900 else if (2 == opcode)
11901 {
11902 /* STR(3). */
11903 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11904 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11905 record_buf_mem[0] = 4;
11906 record_buf_mem[1] = u_regval + (immed_8 * 4);
11907 thumb_insn_r->mem_rec_count = 1;
11908 }
11909 else if (0 == opcode)
11910 {
11911 /* STRH(1). */
11912 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11913 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11914 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11915 record_buf_mem[0] = 2;
11916 record_buf_mem[1] = u_regval + (immed_5 * 2);
11917 thumb_insn_r->mem_rec_count = 1;
11918 }
11919
11920 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11921 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11922 record_buf_mem);
11923
11924 return 0;
11925 }
11926
11927 /* Handling opcode 101 insns. */
11928
11929 static int
11930 thumb_record_misc (insn_decode_record *thumb_insn_r)
11931 {
11932 struct regcache *reg_cache = thumb_insn_r->regcache;
11933
11934 uint32_t opcode = 0;
11935 uint32_t register_bits = 0, register_count = 0;
11936 uint32_t index = 0, start_address = 0;
11937 uint32_t record_buf[24], record_buf_mem[48];
11938 uint32_t reg_src1;
11939
11940 ULONGEST u_regval = 0;
11941
11942 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11943
11944 if (opcode == 0 || opcode == 1)
11945 {
11946 /* ADR and ADD (SP plus immediate) */
11947
11948 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11949 record_buf[0] = reg_src1;
11950 thumb_insn_r->reg_rec_count = 1;
11951 }
11952 else
11953 {
11954 /* Miscellaneous 16-bit instructions */
11955 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
11956
11957 switch (opcode2)
11958 {
11959 case 6:
11960 /* SETEND and CPS */
11961 break;
11962 case 0:
11963 /* ADD/SUB (SP plus immediate) */
11964 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11965 record_buf[0] = ARM_SP_REGNUM;
11966 thumb_insn_r->reg_rec_count = 1;
11967 break;
11968 case 1: /* fall through */
11969 case 3: /* fall through */
11970 case 9: /* fall through */
11971 case 11:
11972 /* CBNZ, CBZ */
11973 break;
11974 case 2:
11975 /* SXTH, SXTB, UXTH, UXTB */
11976 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
11977 thumb_insn_r->reg_rec_count = 1;
11978 break;
11979 case 4: /* fall through */
11980 case 5:
11981 /* PUSH. */
11982 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11983 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11984 while (register_bits)
11985 {
11986 if (register_bits & 0x00000001)
11987 register_count++;
11988 register_bits = register_bits >> 1;
11989 }
11990 start_address = u_regval - \
11991 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11992 thumb_insn_r->mem_rec_count = register_count;
11993 while (register_count)
11994 {
11995 record_buf_mem[(register_count * 2) - 1] = start_address;
11996 record_buf_mem[(register_count * 2) - 2] = 4;
11997 start_address = start_address + 4;
11998 register_count--;
11999 }
12000 record_buf[0] = ARM_SP_REGNUM;
12001 thumb_insn_r->reg_rec_count = 1;
12002 break;
12003 case 10:
12004 /* REV, REV16, REVSH */
12005 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12006 thumb_insn_r->reg_rec_count = 1;
12007 break;
12008 case 12: /* fall through */
12009 case 13:
12010 /* POP. */
12011 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12012 while (register_bits)
12013 {
12014 if (register_bits & 0x00000001)
12015 record_buf[index++] = register_count;
12016 register_bits = register_bits >> 1;
12017 register_count++;
12018 }
12019 record_buf[index++] = ARM_PS_REGNUM;
12020 record_buf[index++] = ARM_SP_REGNUM;
12021 thumb_insn_r->reg_rec_count = index;
12022 break;
12023 case 0xe:
12024 /* BKPT insn. */
12025 /* Handle enhanced software breakpoint insn, BKPT. */
12026 /* CPSR is changed to be executed in ARM state, disabling normal
12027 interrupts, entering abort mode. */
12028 /* According to high vector configuration PC is set. */
12029 /* User hits breakpoint and type reverse, in that case, we need to go back with
12030 previous CPSR and Program Counter. */
12031 record_buf[0] = ARM_PS_REGNUM;
12032 record_buf[1] = ARM_LR_REGNUM;
12033 thumb_insn_r->reg_rec_count = 2;
12034 /* We need to save SPSR value, which is not yet done. */
12035 printf_unfiltered (_("Process record does not support instruction "
12036 "0x%0x at address %s.\n"),
12037 thumb_insn_r->arm_insn,
12038 paddress (thumb_insn_r->gdbarch,
12039 thumb_insn_r->this_addr));
12040 return -1;
12041
12042 case 0xf:
12043 /* If-Then, and hints */
12044 break;
12045 default:
12046 return -1;
12047 };
12048 }
12049
12050 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12051 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12052 record_buf_mem);
12053
12054 return 0;
12055 }
12056
/* Handling opcode 110 insns.  LDMIA/STMIA multiple load/store, the
   conditional branch encodings (handled elsewhere), and the Thumb SWI/SVC
   system call.  Returns 0 on success, -1 when no syscall record hook is
   available, or the hook's own return value.  */

static int
thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
  struct regcache *reg_cache = thumb_insn_r->regcache;

  uint32_t ret = 0; /* function return value: -1:record failure ;  0:success  */
  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
  opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);

  if (1 == opcode2)
    {

      /* LDMIA.  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      /* Every register in the list, plus the base register Rn (write-back),
	 is clobbered by the load.  */
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    record_buf[index++] = register_count;
	  register_bits = register_bits >> 1;
	  register_count++;
	}
      record_buf[index++] = reg_src1;
      thumb_insn_r->reg_rec_count = index;
    }
  else if (0 == opcode2)
    {
      /* It handles both STMIA.  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
      /* Count the registers in the list; one word per register is stored
	 starting at the address in Rn.  */
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    register_count++;
	  register_bits = register_bits >> 1;
	}
      start_address = u_regval;
      thumb_insn_r->mem_rec_count = register_count;
      /* Record (length, address) pairs for each stored word.  */
      while (register_count)
	{
	  record_buf_mem[(register_count * 2) - 1] = start_address;
	  record_buf_mem[(register_count * 2) - 2] = 4;
	  start_address = start_address + 4;
	  register_count--;
	}
    }
  else if (0x1F == opcode1)
    {
      /* Handle arm syscall insn.  */
      if (tdep->arm_syscall_record != NULL)
	{
	  /* The syscall number is passed in r7.  */
	  regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
	  ret = tdep->arm_syscall_record (reg_cache, u_regval);
	}
      else
	{
	  printf_unfiltered (_("no syscall record support\n"));
	  return -1;
	}
    }

  /* B (1), conditional branch is automatically taken care in process_record,
     as PC is saved there.  */

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return ret;
}
12140
12141 /* Handling opcode 111 insns. */
12142
12143 static int
12144 thumb_record_branch (insn_decode_record *thumb_insn_r)
12145 {
12146 uint32_t record_buf[8];
12147 uint32_t bits_h = 0;
12148
12149 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12150
12151 if (2 == bits_h || 3 == bits_h)
12152 {
12153 /* BL */
12154 record_buf[0] = ARM_LR_REGNUM;
12155 thumb_insn_r->reg_rec_count = 1;
12156 }
12157 else if (1 == bits_h)
12158 {
12159 /* BLX(1). */
12160 record_buf[0] = ARM_PS_REGNUM;
12161 record_buf[1] = ARM_LR_REGNUM;
12162 thumb_insn_r->reg_rec_count = 2;
12163 }
12164
12165 /* B(2) is automatically taken care in process_record, as PC is
12166 saved there. */
12167
12168 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12169
12170 return 0;
12171 }
12172
/* Handler for thumb2 load/store multiple instructions.  Covers RFE/SRS
   (op 0 or 3) and LDM/STM in increment-after and decrement-before forms
   (op 1 or 2).  Returns ARM_RECORD_SUCCESS, or the unsupported-insn
   result for SRS.  */

static int
thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, op;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  op = bits (thumb2_insn_r->arm_insn, 23, 24);

  if (0 == op || 3 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle RFE instruction.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* Handle SRS instruction after reading banked SP.  */
	  return arm_record_unsupported_insn (thumb2_insn_r);
	}
    }
  else if (1 == op || 2 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  /* Every listed register is clobbered, plus Rn (write-back) and
	     the status register.  */
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;

	      register_count++;
	      register_bits = register_bits >> 1;
	    }
	  record_buf[index++] = reg_rn;
	  record_buf[index++] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = index;
	}
      else
	{
	  /* Handle STM/STMIA/STMEA and STMDB/STMFD.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
	  /* Count the listed registers: one word is stored per register.  */
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;

	      register_bits = register_bits >> 1;
	    }

	  if (1 == op)
	    {
	      /* STM/STMIA/STMEA: increment after, first store is at Rn.  */
	      start_address = u_regval;
	    }
	  else if (2 == op)
	    {
	      /* STMDB/STMFD: decrement before, lowest stored address is
		 Rn - 4 * register_count.  */
	      start_address = u_regval - register_count * 4;
	    }

	  thumb2_insn_r->mem_rec_count = register_count;
	  /* Record (length, address) pairs for each stored word.  */
	  while (register_count)
	    {
	      record_buf_mem[register_count * 2 - 1] = start_address;
	      record_buf_mem[register_count * 2 - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  record_buf[0] = reg_rn;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}
    }

  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  return ARM_RECORD_SUCCESS;
}
12266
/* Handler for thumb2 load/store (dual/exclusive) and table branch
   instructions.

   Records the destination registers for the load forms, and the memory
   ranges plus status/base registers for the store-exclusive and
   store-dual forms.  Returns ARM_RECORD_SUCCESS.  */

static int
thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rd, reg_rn, offset_imm;
  uint32_t reg_dest1, reg_dest2;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2, op3;

  ULONGEST u_regval[2];

  /* Sub-opcode fields selecting the particular instruction form.  */
  op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
  op3 = bits (thumb2_insn_r->arm_insn, 4, 7);

  if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* Load forms: only registers are modified, no memory recording
	 needed.  The excluded op1/op2/op3 combination is the table-branch
	 encoding (presumably TBB/TBH, which writes no GPR destination —
	 verify against the ARM ARM).  */
      if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
	{
	  reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
	  record_buf[0] = reg_dest1;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}

      /* Dual/exclusive-doubleword loads additionally write a second
	 destination register (bits 8-11).  */
      if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
	{
	  reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
	  record_buf[2] = reg_dest2;
	  thumb2_insn_r->reg_rec_count = 3;
	}
    }
  else
    {
      /* Store forms: read the base register to compute the memory that
	 will be overwritten.  */
      reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

      if (0 == op1 && 0 == op2)
	{
	  /* Handle STREX.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
	  address = u_regval[0] + (offset_imm * 4);
	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = address;
	  thumb2_insn_r->mem_rec_count = 1;
	  /* STREX also writes its success/failure status into Rd.  */
	  reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
	  record_buf[0] = reg_rd;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else if (1 == op1 && 0 == op2)
	{
	  /* STREXB/STREXH/STREXD: Rd (bits 0-3) receives the exclusive
	     store status; the access width depends on op3.  */
	  reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
	  record_buf[0] = reg_rd;
	  thumb2_insn_r->reg_rec_count = 1;
	  address = u_regval[0];
	  record_buf_mem[1] = address;

	  if (4 == op3)
	    {
	      /* Handle STREXB.  */
	      record_buf_mem[0] = 1;
	      thumb2_insn_r->mem_rec_count = 1;
	    }
	  else if (5 == op3)
	    {
	      /* Handle STREXH.  */
	      record_buf_mem[0] = 2 ;
	      thumb2_insn_r->mem_rec_count = 1;
	    }
	  else if (7 == op3)
	    {
	      /* Handle STREXD: two consecutive 4-byte words are stored.  */
	      address = u_regval[0];
	      record_buf_mem[0] = 4;
	      record_buf_mem[2] = 4;
	      record_buf_mem[3] = address + 4;
	      thumb2_insn_r->mem_rec_count = 2;
	    }
	}
      else
	{
	  /* Store-dual immediate form: bit 24 (P) selects pre-indexing,
	     bit 23 (U) selects adding vs subtracting the scaled offset.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);

	  if (bit (thumb2_insn_r->arm_insn, 24))
	    {
	      if (bit (thumb2_insn_r->arm_insn, 23))
		offset_addr = u_regval[0] + (offset_imm * 4);
	      else
		offset_addr = u_regval[0] - (offset_imm * 4);

	      address = offset_addr;
	    }
	  else
	    address = u_regval[0];

	  /* Two words are overwritten; the base register may also be
	     written back, so record it too.  */
	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = address;
	  record_buf_mem[2] = 4;
	  record_buf_mem[3] = address + 4;
	  thumb2_insn_r->mem_rec_count = 2;
	  record_buf[0] = reg_rn;
	  thumb2_insn_r->reg_rec_count = 1;
	}
    }

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12383
12384 /* Handler for thumb2 data processing (shift register and modified immediate)
12385 instructions. */
12386
12387 static int
12388 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12389 {
12390 uint32_t reg_rd, op;
12391 uint32_t record_buf[8];
12392
12393 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12394 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12395
12396 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12397 {
12398 record_buf[0] = ARM_PS_REGNUM;
12399 thumb2_insn_r->reg_rec_count = 1;
12400 }
12401 else
12402 {
12403 record_buf[0] = reg_rd;
12404 record_buf[1] = ARM_PS_REGNUM;
12405 thumb2_insn_r->reg_rec_count = 2;
12406 }
12407
12408 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12409 record_buf);
12410 return ARM_RECORD_SUCCESS;
12411 }
12412
12413 /* Generic handler for thumb2 instructions which effect destination and PS
12414 registers. */
12415
12416 static int
12417 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12418 {
12419 uint32_t reg_rd;
12420 uint32_t record_buf[8];
12421
12422 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12423
12424 record_buf[0] = reg_rd;
12425 record_buf[1] = ARM_PS_REGNUM;
12426 thumb2_insn_r->reg_rec_count = 2;
12427
12428 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12429 record_buf);
12430 return ARM_RECORD_SUCCESS;
12431 }
12432
12433 /* Handler for thumb2 branch and miscellaneous control instructions. */
12434
12435 static int
12436 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12437 {
12438 uint32_t op, op1, op2;
12439 uint32_t record_buf[8];
12440
12441 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12442 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12443 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12444
12445 /* Handle MSR insn. */
12446 if (!(op1 & 0x2) && 0x38 == op)
12447 {
12448 if (!(op2 & 0x3))
12449 {
12450 /* CPSR is going to be changed. */
12451 record_buf[0] = ARM_PS_REGNUM;
12452 thumb2_insn_r->reg_rec_count = 1;
12453 }
12454 else
12455 {
12456 arm_record_unsupported_insn(thumb2_insn_r);
12457 return -1;
12458 }
12459 }
12460 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12461 {
12462 /* BLX. */
12463 record_buf[0] = ARM_PS_REGNUM;
12464 record_buf[1] = ARM_LR_REGNUM;
12465 thumb2_insn_r->reg_rec_count = 2;
12466 }
12467
12468 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12469 record_buf);
12470 return ARM_RECORD_SUCCESS;
12471 }
12472
/* Handler for thumb2 store single data item instructions.

   Computes the effective address of the store (12-bit immediate, register
   offset, or 8-bit immediate with indexing), then records the memory
   range of the appropriate width and the base register (which may be
   written back).  */

static int
thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2;

  ULONGEST u_regval[2];

  op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
  op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

  if (bit (thumb2_insn_r->arm_insn, 23))
    {
      /* 12-bit immediate offset form (comment in earlier revisions called
	 this "T2 encoding"; labeling not verified against the ARM ARM).  */
      offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
      offset_addr = u_regval[0] + offset_imm;
      address = offset_addr;
    }
  else
    {
      /* Register-offset or 8-bit-immediate forms.  */
      if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
	{
	  /* Register offset: base + (Rm << shift).  */
	  reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
	  regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
	  shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
	  offset_addr = u_regval[1] << shift_imm;
	  address = u_regval[0] + offset_addr;
	}
      else
	{
	  /* 8-bit immediate: bit 10 (P) selects indexed addressing and
	     bit 9 (U) selects add vs subtract.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
	  if (bit (thumb2_insn_r->arm_insn, 10))
	    {
	      if (bit (thumb2_insn_r->arm_insn, 9))
		offset_addr = u_regval[0] + offset_imm;
	      else
		offset_addr = u_regval[0] - offset_imm;

	      address = offset_addr;
	    }
	  else
	    address = u_regval[0];
	}
    }

  /* Access width is selected by op1 (low bits pick byte/half/word).  */
  switch (op1)
    {
      /* Store byte instructions.  */
      case 4:
      case 0:
	record_buf_mem[0] = 1;
	break;
      /* Store half word instructions.  */
      case 1:
      case 5:
	record_buf_mem[0] = 2;
	break;
      /* Store word instructions.  */
      case 2:
      case 6:
	record_buf_mem[0] = 4;
	break;

      default:
	gdb_assert_not_reached ("no decoding pattern found");
	break;
    }

  /* One memory record (width, address) plus the base register, which the
     indexed forms may write back.  */
  record_buf_mem[1] = address;
  thumb2_insn_r->mem_rec_count = 1;
  record_buf[0] = reg_rn;
  thumb2_insn_r->reg_rec_count = 1;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12562
12563 /* Handler for thumb2 load memory hints instructions. */
12564
12565 static int
12566 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12567 {
12568 uint32_t record_buf[8];
12569 uint32_t reg_rt, reg_rn;
12570
12571 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12572 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12573
12574 if (ARM_PC_REGNUM != reg_rt)
12575 {
12576 record_buf[0] = reg_rt;
12577 record_buf[1] = reg_rn;
12578 record_buf[2] = ARM_PS_REGNUM;
12579 thumb2_insn_r->reg_rec_count = 3;
12580
12581 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12582 record_buf);
12583 return ARM_RECORD_SUCCESS;
12584 }
12585
12586 return ARM_RECORD_FAILURE;
12587 }
12588
12589 /* Handler for thumb2 load word instructions. */
12590
12591 static int
12592 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12593 {
12594 uint32_t record_buf[8];
12595
12596 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12597 record_buf[1] = ARM_PS_REGNUM;
12598 thumb2_insn_r->reg_rec_count = 2;
12599
12600 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12601 record_buf);
12602 return ARM_RECORD_SUCCESS;
12603 }
12604
12605 /* Handler for thumb2 long multiply, long multiply accumulate, and
12606 divide instructions. */
12607
12608 static int
12609 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12610 {
12611 uint32_t opcode1 = 0, opcode2 = 0;
12612 uint32_t record_buf[8];
12613
12614 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12615 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12616
12617 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12618 {
12619 /* Handle SMULL, UMULL, SMULAL. */
12620 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12621 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12622 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12623 record_buf[2] = ARM_PS_REGNUM;
12624 thumb2_insn_r->reg_rec_count = 3;
12625 }
12626 else if (1 == opcode1 || 3 == opcode2)
12627 {
12628 /* Handle SDIV and UDIV. */
12629 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12630 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12631 record_buf[2] = ARM_PS_REGNUM;
12632 thumb2_insn_r->reg_rec_count = 3;
12633 }
12634 else
12635 return ARM_RECORD_FAILURE;
12636
12637 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12638 record_buf);
12639 return ARM_RECORD_SUCCESS;
12640 }
12641
12642 /* Record handler for thumb32 coprocessor instructions. */
12643
12644 static int
12645 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12646 {
12647 if (bit (thumb2_insn_r->arm_insn, 25))
12648 return arm_record_coproc_data_proc (thumb2_insn_r);
12649 else
12650 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12651 }
12652
12653 /* Record handler for advance SIMD structure load/store instructions. */
12654
12655 static int
12656 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12657 {
12658 struct regcache *reg_cache = thumb2_insn_r->regcache;
12659 uint32_t l_bit, a_bit, b_bits;
12660 uint32_t record_buf[128], record_buf_mem[128];
12661 uint32_t reg_rn, reg_vd, address, f_elem;
12662 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12663 uint8_t f_ebytes;
12664
12665 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12666 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12667 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12668 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12669 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12670 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12671 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12672 f_elem = 8 / f_ebytes;
12673
12674 if (!l_bit)
12675 {
12676 ULONGEST u_regval = 0;
12677 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12678 address = u_regval;
12679
12680 if (!a_bit)
12681 {
12682 /* Handle VST1. */
12683 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12684 {
12685 if (b_bits == 0x07)
12686 bf_regs = 1;
12687 else if (b_bits == 0x0a)
12688 bf_regs = 2;
12689 else if (b_bits == 0x06)
12690 bf_regs = 3;
12691 else if (b_bits == 0x02)
12692 bf_regs = 4;
12693 else
12694 bf_regs = 0;
12695
12696 for (index_r = 0; index_r < bf_regs; index_r++)
12697 {
12698 for (index_e = 0; index_e < f_elem; index_e++)
12699 {
12700 record_buf_mem[index_m++] = f_ebytes;
12701 record_buf_mem[index_m++] = address;
12702 address = address + f_ebytes;
12703 thumb2_insn_r->mem_rec_count += 1;
12704 }
12705 }
12706 }
12707 /* Handle VST2. */
12708 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12709 {
12710 if (b_bits == 0x09 || b_bits == 0x08)
12711 bf_regs = 1;
12712 else if (b_bits == 0x03)
12713 bf_regs = 2;
12714 else
12715 bf_regs = 0;
12716
12717 for (index_r = 0; index_r < bf_regs; index_r++)
12718 for (index_e = 0; index_e < f_elem; index_e++)
12719 {
12720 for (loop_t = 0; loop_t < 2; loop_t++)
12721 {
12722 record_buf_mem[index_m++] = f_ebytes;
12723 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12724 thumb2_insn_r->mem_rec_count += 1;
12725 }
12726 address = address + (2 * f_ebytes);
12727 }
12728 }
12729 /* Handle VST3. */
12730 else if ((b_bits & 0x0e) == 0x04)
12731 {
12732 for (index_e = 0; index_e < f_elem; index_e++)
12733 {
12734 for (loop_t = 0; loop_t < 3; loop_t++)
12735 {
12736 record_buf_mem[index_m++] = f_ebytes;
12737 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12738 thumb2_insn_r->mem_rec_count += 1;
12739 }
12740 address = address + (3 * f_ebytes);
12741 }
12742 }
12743 /* Handle VST4. */
12744 else if (!(b_bits & 0x0e))
12745 {
12746 for (index_e = 0; index_e < f_elem; index_e++)
12747 {
12748 for (loop_t = 0; loop_t < 4; loop_t++)
12749 {
12750 record_buf_mem[index_m++] = f_ebytes;
12751 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12752 thumb2_insn_r->mem_rec_count += 1;
12753 }
12754 address = address + (4 * f_ebytes);
12755 }
12756 }
12757 }
12758 else
12759 {
12760 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12761
12762 if (bft_size == 0x00)
12763 f_ebytes = 1;
12764 else if (bft_size == 0x01)
12765 f_ebytes = 2;
12766 else if (bft_size == 0x02)
12767 f_ebytes = 4;
12768 else
12769 f_ebytes = 0;
12770
12771 /* Handle VST1. */
12772 if (!(b_bits & 0x0b) || b_bits == 0x08)
12773 thumb2_insn_r->mem_rec_count = 1;
12774 /* Handle VST2. */
12775 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12776 thumb2_insn_r->mem_rec_count = 2;
12777 /* Handle VST3. */
12778 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12779 thumb2_insn_r->mem_rec_count = 3;
12780 /* Handle VST4. */
12781 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12782 thumb2_insn_r->mem_rec_count = 4;
12783
12784 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12785 {
12786 record_buf_mem[index_m] = f_ebytes;
12787 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12788 }
12789 }
12790 }
12791 else
12792 {
12793 if (!a_bit)
12794 {
12795 /* Handle VLD1. */
12796 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12797 thumb2_insn_r->reg_rec_count = 1;
12798 /* Handle VLD2. */
12799 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12800 thumb2_insn_r->reg_rec_count = 2;
12801 /* Handle VLD3. */
12802 else if ((b_bits & 0x0e) == 0x04)
12803 thumb2_insn_r->reg_rec_count = 3;
12804 /* Handle VLD4. */
12805 else if (!(b_bits & 0x0e))
12806 thumb2_insn_r->reg_rec_count = 4;
12807 }
12808 else
12809 {
12810 /* Handle VLD1. */
12811 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12812 thumb2_insn_r->reg_rec_count = 1;
12813 /* Handle VLD2. */
12814 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12815 thumb2_insn_r->reg_rec_count = 2;
12816 /* Handle VLD3. */
12817 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12818 thumb2_insn_r->reg_rec_count = 3;
12819 /* Handle VLD4. */
12820 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12821 thumb2_insn_r->reg_rec_count = 4;
12822
12823 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12824 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12825 }
12826 }
12827
12828 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12829 {
12830 record_buf[index_r] = reg_rn;
12831 thumb2_insn_r->reg_rec_count += 1;
12832 }
12833
12834 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12835 record_buf);
12836 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12837 record_buf_mem);
12838 return 0;
12839 }
12840
/* Decodes thumb2 instruction type and invokes its record handler.

   Dispatch follows bits 27-28 (op1), 20-26 (op2) and 15 (op) of the
   already-swapped 32-bit instruction word.  Returns the handler's result,
   or -1 when no pattern matches.  */

static unsigned int
thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
{
  uint32_t op, op1, op2;

  op = bit (thumb2_insn_r->arm_insn, 15);
  op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 26);

  if (op1 == 0x01)
    {
      if (!(op2 & 0x64 ))
	{
	  /* Load/store multiple instruction.  */
	  return thumb2_record_ld_st_multiple (thumb2_insn_r);
	}
      else if ((op2 & 0x64) == 0x4)
	{
	  /* Load/store (dual/exclusive) and table branch instruction.  */
	  return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
	}
      else if ((op2 & 0x60) == 0x20)
	{
	  /* Data-processing (shifted register).  */
	  return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
	}
      else if (op2 & 0x40)
	{
	  /* Co-processor instructions.  */
	  return thumb2_record_coproc_insn (thumb2_insn_r);
	}
    }
  else if (op1 == 0x02)
    {
      if (op)
	{
	  /* Branches and miscellaneous control instructions.  */
	  return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
	}
      else if (op2 & 0x20)
	{
	  /* Data-processing (plain binary immediate) instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else
	{
	  /* Data-processing (modified immediate).  */
	  return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
	}
    }
  else if (op1 == 0x03)
    {
      if (!(op2 & 0x71 ))
	{
	  /* Store single data item.  */
	  return thumb2_record_str_single_data (thumb2_insn_r);
	}
      else if (!((op2 & 0x71) ^ 0x10))
	{
	  /* Advanced SIMD or structure load/store instructions.  */
	  return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x01))
	{
	  /* Load byte, memory hints instruction.  */
	  return thumb2_record_ld_mem_hints (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x03))
	{
	  /* Load halfword, memory hints instruction.  */
	  return thumb2_record_ld_mem_hints (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x05))
	{
	  /* Load word instruction.  */
	  return thumb2_record_ld_word (thumb2_insn_r);
	}
      else if (!((op2 & 0x70) ^ 0x20))
	{
	  /* Data-processing (register) instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else if (!((op2 & 0x78) ^ 0x30))
	{
	  /* Multiply, multiply accumulate, abs diff instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else if (!((op2 & 0x78) ^ 0x38))
	{
	  /* Long multiply, long multiply accumulate, and divide.  */
	  return thumb2_record_lmul_lmla_div (thumb2_insn_r);
	}
      else if (op2 & 0x40)
	{
	  /* Co-processor instructions.  */
	  return thumb2_record_coproc_insn (thumb2_insn_r);
	}
    }

  /* No pattern matched: callers treat any value != ARM_RECORD_SUCCESS as
     an unsupported instruction (-1 wraps in the unsigned return type).  */
  return -1;
}
12944
namespace {
/* Abstract memory reader.  Allows the record decoders to be driven either
   from the live target or from canned instruction buffers (see the
   selftests below).  */

class abstract_memory_reader
{
public:
  /* Read LEN bytes of target memory at address MEMADDR, placing the
     results in GDB's memory at BUF.  Return true on success.  */

  virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
};

/* Instruction reader from real target.  */

class instruction_reader : public abstract_memory_reader
{
public:
  bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
  {
    /* target_read_memory returns zero on success, so invert its result
       into this interface's boolean convention.  */
    if (target_read_memory (memaddr, buf, len))
      return false;
    else
      return true;
  }
};

} // namespace
12972
/* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on
   success and positive val on failure.  The fetched bytes are decoded into
   INSN_RECORD->arm_insn using the target's code byte order.  */

static int
extract_arm_insn (abstract_memory_reader& reader,
		  insn_decode_record *insn_record, uint32_t insn_size)
{
  gdb_byte buf[insn_size];

  memset (&buf[0], 0, insn_size);

  if (!reader.read (insn_record->this_addr, buf, insn_size))
    return 1;
  /* The code byte order may differ from the data byte order on some
     targets, hence gdbarch_byte_order_for_code.  */
  insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
			   insn_size,
			   gdbarch_byte_order_for_code (insn_record->gdbarch));
  return 0;
}
12991
12992 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12993
/* Decode arm/thumb insn depending on condition codes and opcodes; and
   dispatch it.

   READER supplies the instruction bytes; ARM_RECORD accumulates the
   register/memory records; RECORD_TYPE selects ARM, Thumb or Thumb-2
   decoding; INSN_SIZE is the fetch width in bytes.  Returns 0 on success,
   -1 on read failure or unsupported instruction.  */

static int
decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
	     record_type_t record_type, uint32_t insn_size)
{

  /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
     instruction.  */
  static const sti_arm_hdl_fp_t arm_handle_insn[8] =
  {
    arm_record_data_proc_misc_ld_str,	/* 000.  */
    arm_record_data_proc_imm,		/* 001.  */
    arm_record_ld_st_imm_offset,	/* 010.  */
    arm_record_ld_st_reg_offset,	/* 011.  */
    arm_record_ld_st_multiple,		/* 100.  */
    arm_record_b_bl,			/* 101.  */
    arm_record_asimd_vfp_coproc,	/* 110.  */
    arm_record_coproc_data_proc		/* 111.  */
  };

  /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
     instruction.  */
  static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
  { \
    thumb_record_shift_add_sub,	       /* 000.  */
    thumb_record_add_sub_cmp_mov,      /* 001.  */
    thumb_record_ld_st_reg_offset,     /* 010.  */
    thumb_record_ld_st_imm_offset,     /* 011.  */
    thumb_record_ld_st_stack,	       /* 100.  */
    thumb_record_misc,		       /* 101.  */
    thumb_record_ldm_stm_swi,	       /* 110.  */
    thumb_record_branch		       /* 111.  */
  };

  uint32_t ret = 0;    /* return value: negative:failure   0:success.  */
  uint32_t insn_id = 0;

  if (extract_arm_insn (reader, arm_record, insn_size))
    {
      /* Could not fetch the instruction bytes at all.  */
      if (record_debug)
	{
	  printf_unfiltered (_("Process record: error reading memory at "
			       "addr %s len = %d.\n"),
			     paddress (arm_record->gdbarch,
				       arm_record->this_addr), insn_size);
	}
      return -1;
    }
  else if (ARM_RECORD == record_type)
    {
      /* ARM mode: condition in bits 28-31, major opcode in bits 25-27.  */
      arm_record->cond = bits (arm_record->arm_insn, 28, 31);
      insn_id = bits (arm_record->arm_insn, 25, 27);

      if (arm_record->cond == 0xf)
	ret = arm_record_extension_space (arm_record);
      else
	{
	  /* If this insn has fallen into extension space
	     then we need not decode it anymore.  */
	  ret = arm_handle_insn[insn_id] (arm_record);
	}
      if (ret != ARM_RECORD_SUCCESS)
	{
	  arm_record_unsupported_insn (arm_record);
	  ret = -1;
	}
    }
  else if (THUMB_RECORD == record_type)
    {
      /* As thumb does not have condition codes, we set negative.  */
      arm_record->cond = -1;
      insn_id = bits (arm_record->arm_insn, 13, 15);
      ret = thumb_handle_insn[insn_id] (arm_record);
      if (ret != ARM_RECORD_SUCCESS)
	{
	  arm_record_unsupported_insn (arm_record);
	  ret = -1;
	}
    }
  else if (THUMB2_RECORD == record_type)
    {
      /* As thumb does not have condition codes, we set negative.  */
      arm_record->cond = -1;

      /* Swap first half of 32bit thumb instruction with second half.  */
      arm_record->arm_insn
	= (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);

      ret = thumb2_record_decode_insn_handler (arm_record);

      if (ret != ARM_RECORD_SUCCESS)
	{
	  arm_record_unsupported_insn (arm_record);
	  ret = -1;
	}
    }
  else
    {
      /* Throw assertion.  */
      gdb_assert_not_reached ("not a valid instruction, could not decode");
    }

  return ret;
}
13100
13101 #if GDB_SELF_TEST
13102 namespace selftests {
13103
/* Provide both 16-bit and 32-bit thumb instructions.

   A canned-memory reader for the unit tests below: the instruction
   stream is an array of 16-bit halfwords, and MEMADDR is interpreted as
   a byte offset into it (hence the / 2 indexing).  */

class instruction_reader_thumb : public abstract_memory_reader
{
public:
  template<size_t SIZE>
  instruction_reader_thumb (enum bfd_endian endian,
			    const uint16_t (&insns)[SIZE])
    : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
  {}

  bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
  {
    /* Only halfword-aligned 2- or 4-byte reads inside the canned array
       are meaningful here.  */
    SELF_CHECK (len == 4 || len == 2);
    SELF_CHECK (memaddr % 2 == 0);
    SELF_CHECK ((memaddr / 2) < m_insns_size);

    store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
    if (len == 4)
      {
	/* A 32-bit Thumb-2 read takes the following halfword too.  */
	store_unsigned_integer (&buf[2], 2, m_endian,
				m_insns[memaddr / 2 + 1]);
      }
    return true;
  }

private:
  enum bfd_endian m_endian;	/* Byte order used to serialize halfwords.  */
  const uint16_t *m_insns;	/* Borrowed pointer to the canned stream.  */
  size_t m_insns_size;		/* Number of halfwords in m_insns.  */
};
13135
/* Unit test: run the record decoders over a few canned Thumb and Thumb-2
   instructions and check the recorded register sets.  */

static void
arm_record_test (void)
{
  struct gdbarch_info info;
  gdbarch_info_init (&info);
  info.bfd_arch_info = bfd_scan_arch ("arm");

  struct gdbarch *gdbarch = gdbarch_find_by_info (info);

  SELF_CHECK (gdbarch != NULL);

  /* 16-bit Thumb instructions.  */
  {
    insn_decode_record arm_record;

    memset (&arm_record, 0, sizeof (insn_decode_record));
    arm_record.gdbarch = gdbarch;

    static const uint16_t insns[] = {
      /* db b2	uxtb	r3, r3 */
      0xb2db,
      /* cd 58	ldr	r5, [r1, r3] */
      0x58cd,
    };

    enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
    instruction_reader_thumb reader (endian, insns);
    int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
			   THUMB_INSN_SIZE_BYTES);

    /* uxtb writes only its destination, r3.  */
    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 3);

    /* Advance to the second canned instruction and decode it.  */
    arm_record.this_addr += 2;
    ret = decode_insn (reader, &arm_record, THUMB_RECORD,
		       THUMB_INSN_SIZE_BYTES);

    /* ldr writes only its destination, r5.  */
    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 5);
  }

  /* 32-bit Thumb-2 instructions.  */
  {
    insn_decode_record arm_record;

    memset (&arm_record, 0, sizeof (insn_decode_record));
    arm_record.gdbarch = gdbarch;

    static const uint16_t insns[] = {
      /* 1d ee 70 7f	 mrc	15, 0, r7, cr13, cr0, {3} */
      0xee1d, 0x7f70,
    };

    enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
    instruction_reader_thumb reader (endian, insns);
    int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
			   THUMB2_INSN_SIZE_BYTES);

    /* mrc transfers into r7 only.  */
    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 7);
  }
}
13204 } // namespace selftests
13205 #endif /* GDB_SELF_TEST */
13206
/* Cleans up local record registers and memory allocations.

   Frees the arrays populated by REG_ALLOC/MEM_ALLOC during decoding
   (still NULL when the decoder recorded nothing of that kind).  */

static void
deallocate_reg_mem (insn_decode_record *record)
{
  xfree (record->arm_regs);
  xfree (record->arm_mems);
}
13215
13216
/* Parse the current instruction and record the values of the registers and
   memory that will be changed in current instruction to record_arch_list".
   Return -1 if something is wrong.

   GDBARCH and REGCACHE describe the inferior; INSN_ADDR is the address of
   the instruction about to execute.  The CPSR T bit selects ARM vs Thumb
   decoding; 32-bit Thumb-2 encodings are detected from the first
   halfword's top 5 bits.  */

int
arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
		    CORE_ADDR insn_addr)
{

  uint32_t no_of_rec = 0;
  uint32_t ret = 0;  /* return value: -1:record failure ;  0:success  */
  ULONGEST t_bit = 0, insn_id = 0;

  ULONGEST u_regval = 0;

  insn_decode_record arm_record;

  memset (&arm_record, 0, sizeof (insn_decode_record));
  arm_record.regcache = regcache;
  arm_record.this_addr = insn_addr;
  arm_record.gdbarch = gdbarch;


  if (record_debug > 1)
    {
      fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
			  "addr = %s\n",
      paddress (gdbarch, arm_record.this_addr));
    }

  /* Fetch only 2 bytes here: enough to classify the encoding.  decode_insn
     below re-reads the instruction at its full width.  */
  instruction_reader reader;
  if (extract_arm_insn (reader, &arm_record, 2))
    {
      if (record_debug)
	{
	  printf_unfiltered (_("Process record: error reading memory at "
			       "addr %s len = %d.\n"),
			     paddress (arm_record.gdbarch,
				       arm_record.this_addr), 2);
	}
      return -1;
    }

  /* Check the insn, whether it is thumb or arm one.  */

  t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
  regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);


  if (!(u_regval & t_bit))
    {
      /* We are decoding arm insn.  */
      ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
    }
  else
    {
      insn_id = bits (arm_record.arm_insn, 11, 15);
      /* is it thumb2 insn?  Prefixes 0x1D, 0x1E, 0x1F mark the first
	 halfword of a 32-bit Thumb-2 encoding.  */
      if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
	{
	  ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
			     THUMB2_INSN_SIZE_BYTES);
	}
      else
	{
	  /* We are decoding thumb insn.  */
	  ret = decode_insn (reader, &arm_record, THUMB_RECORD,
			     THUMB_INSN_SIZE_BYTES);
	}
    }

  if (0 == ret)
    {
      /* Record registers.  PC is always recorded since every instruction
	 advances it.  */
      record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
      if (arm_record.arm_regs)
	{
	  for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
	    {
	      if (record_full_arch_list_add_reg
		  (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
		ret = -1;
	    }
	}
      /* Record memories.  */
      if (arm_record.arm_mems)
	{
	  for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
	    {
	      if (record_full_arch_list_add_mem
		  ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
		   arm_record.arm_mems[no_of_rec].len))
		ret = -1;
	    }
	}

      if (record_full_arch_list_add_end ())
	ret = -1;
    }


  deallocate_reg_mem (&arm_record);

  return ret;
}
13322
13323 /* See arm-tdep.h. */
13324
13325 const target_desc *
13326 arm_read_description (arm_fp_type fp_type)
13327 {
13328 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13329
13330 if (tdesc == nullptr)
13331 {
13332 tdesc = arm_create_target_description (fp_type);
13333 tdesc_arm_list[fp_type] = tdesc;
13334 }
13335
13336 return tdesc;
13337 }
13338
13339 /* See arm-tdep.h. */
13340
13341 const target_desc *
13342 arm_read_mprofile_description (arm_m_profile_type m_type)
13343 {
13344 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13345
13346 if (tdesc == nullptr)
13347 {
13348 tdesc = arm_create_mprofile_target_description (m_type);
13349 tdesc_arm_mprofile_list[m_type] = tdesc;
13350 }
13351
13352 return tdesc;
13353 }