]> git.ipfire.org Git - thirdparty/binutils-gdb.git/blob - gdb/arm-tdep.c
Better make rule for arch/ files built for IPA
[thirdparty/binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2017 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2-frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observer.h"
48
49 #include "arch/arm.h"
50 #include "arch/arm-get-next-pcs.h"
51 #include "arm-tdep.h"
52 #include "gdb/sim-arm.h"
53
54 #include "elf-bfd.h"
55 #include "coff/internal.h"
56 #include "elf/arm.h"
57
58 #include "vec.h"
59
60 #include "record.h"
61 #include "record-full.h"
62 #include <algorithm>
63
64 #include "features/arm/arm-with-m.c"
65 #include "features/arm/arm-with-m-fpa-layout.c"
66 #include "features/arm/arm-with-m-vfp-d16.c"
67 #include "features/arm/arm-with-iwmmxt.c"
68 #include "features/arm/arm-with-vfpv2.c"
69 #include "features/arm/arm-with-vfpv3.c"
70 #include "features/arm/arm-with-neon.c"
71
72 #if GDB_SELF_TEST
73 #include "selftest.h"
74 #endif
75
76 static int arm_debug;
77
78 /* Macros for setting and testing a bit in a minimal symbol that marks
79 it as Thumb function. The MSB of the minimal symbol's "info" field
80 is used for this purpose.
81
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
84
85 #define MSYMBOL_SET_SPECIAL(msym) \
86 MSYMBOL_TARGET_FLAG_1 (msym) = 1
87
88 #define MSYMBOL_IS_SPECIAL(msym) \
89 MSYMBOL_TARGET_FLAG_1 (msym)
90
/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

/* One ELF mapping symbol.  VALUE is its section-relative offset
   (arm_find_mapping_symbol rebases lookups before searching), and
   TYPE is the mapping symbol's class character — arm_pc_is_thumb
   tests for 't', which marks Thumb code.  */
struct arm_mapping_symbol
{
  bfd_vma value;
  char type;
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

/* Per-objfile storage attached via ARM_OBJFILE_DATA_KEY: one vector
   of mapping symbols per BFD section, indexed by section index.
   arm_find_mapping_symbol runs VEC_lower_bound over these, so the
   entries are presumably kept sorted by VALUE — populated elsewhere
   in this file.  */
struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};
106
107 /* The list of available "set arm ..." and "show arm ..." commands. */
108 static struct cmd_list_element *setarmcmdlist = NULL;
109 static struct cmd_list_element *showarmcmdlist = NULL;
110
111 /* The type of floating-point to use. Keep this in sync with enum
112 arm_float_model, and the help string in _initialize_arm_tdep. */
113 static const char *const fp_model_strings[] =
114 {
115 "auto",
116 "softfpa",
117 "fpa",
118 "softvfp",
119 "vfp",
120 NULL
121 };
122
123 /* A variable that can be configured by the user. */
124 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
125 static const char *current_fp_model = "auto";
126
127 /* The ABI to use. Keep this in sync with arm_abi_kind. */
128 static const char *const arm_abi_strings[] =
129 {
130 "auto",
131 "APCS",
132 "AAPCS",
133 NULL
134 };
135
136 /* A variable that can be configured by the user. */
137 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
138 static const char *arm_abi_string = "auto";
139
140 /* The execution mode to assume. */
141 static const char *const arm_mode_strings[] =
142 {
143 "auto",
144 "arm",
145 "thumb",
146 NULL
147 };
148
149 static const char *arm_fallback_mode_string = "auto";
150 static const char *arm_force_mode_string = "auto";
151
152 /* The standard register names, and all the valid aliases for them. Note
153 that `fp', `sp' and `pc' are not added in this alias list, because they
154 have been added as builtin user registers in
155 std-regs.c:_initialize_frame_reg. */
156 static const struct
157 {
158 const char *name;
159 int regnum;
160 } arm_register_aliases[] = {
161 /* Basic register numbers. */
162 { "r0", 0 },
163 { "r1", 1 },
164 { "r2", 2 },
165 { "r3", 3 },
166 { "r4", 4 },
167 { "r5", 5 },
168 { "r6", 6 },
169 { "r7", 7 },
170 { "r8", 8 },
171 { "r9", 9 },
172 { "r10", 10 },
173 { "r11", 11 },
174 { "r12", 12 },
175 { "r13", 13 },
176 { "r14", 14 },
177 { "r15", 15 },
178 /* Synonyms (argument and variable registers). */
179 { "a1", 0 },
180 { "a2", 1 },
181 { "a3", 2 },
182 { "a4", 3 },
183 { "v1", 4 },
184 { "v2", 5 },
185 { "v3", 6 },
186 { "v4", 7 },
187 { "v5", 8 },
188 { "v6", 9 },
189 { "v7", 10 },
190 { "v8", 11 },
191 /* Other platform-specific names for r9. */
192 { "sb", 9 },
193 { "tr", 9 },
194 /* Special names. */
195 { "ip", 12 },
196 { "lr", 14 },
197 /* Names used by GCC (not listed in the ARM EABI). */
198 { "sl", 10 },
199 /* A special name from the older ATPCS. */
200 { "wr", 7 },
201 };
202
203 static const char *const arm_register_names[] =
204 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
205 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
206 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
207 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
208 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
209 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
210 "fps", "cpsr" }; /* 24 25 */
211
212 /* Holds the current set of options to be passed to the disassembler. */
213 static char *arm_disassembler_options;
214
215 /* Valid register name styles. */
216 static const char **valid_disassembly_styles;
217
218 /* Disassembly style to use. Default to "std" register names. */
219 static const char *disassembly_style;
220
221 /* This is used to keep the bfd arch_info in sync with the disassembly
222 style. */
223 static void set_disassembly_style_sfunc (const char *, int,
224 struct cmd_list_element *);
225 static void show_disassembly_style_sfunc (struct ui_file *, int,
226 struct cmd_list_element *,
227 const char *);
228
229 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
230 struct regcache *regcache,
231 int regnum, gdb_byte *buf);
232 static void arm_neon_quad_write (struct gdbarch *gdbarch,
233 struct regcache *regcache,
234 int regnum, const gdb_byte *buf);
235
236 static CORE_ADDR
237 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
238
239
240 /* get_next_pcs operations. */
241 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
242 arm_get_next_pcs_read_memory_unsigned_integer,
243 arm_get_next_pcs_syscall_next_pc,
244 arm_get_next_pcs_addr_bits_remove,
245 arm_get_next_pcs_is_thumb,
246 NULL,
247 };
248
/* Per-frame state discovered by the prologue analyzers (see
   thumb_analyze_prologue and arm_analyze_prologue), consumed by the
   ARM frame unwinding machinery in this file.  */
struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};
268
269 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
270 CORE_ADDR prologue_start,
271 CORE_ADDR prologue_end,
272 struct arm_prologue_cache *cache);
273
274 /* Architecture version for displaced stepping. This effects the behaviour of
275 certain instructions, and really should not be hard-wired. */
276
277 #define DISPLACED_STEPPING_ARCH_VERSION 5
278
279 /* Set to true if the 32-bit mode is in use. */
280
281 int arm_apcs_32 = 1;
282
283 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
284
285 int
286 arm_psr_thumb_bit (struct gdbarch *gdbarch)
287 {
288 if (gdbarch_tdep (gdbarch)->is_m)
289 return XPSR_T;
290 else
291 return CPSR_T;
292 }
293
294 /* Determine if the processor is currently executing in Thumb mode. */
295
296 int
297 arm_is_thumb (struct regcache *regcache)
298 {
299 ULONGEST cpsr;
300 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
301
302 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
303
304 return (cpsr & t_bit) != 0;
305 }
306
307 /* Determine if FRAME is executing in Thumb mode. */
308
309 int
310 arm_frame_is_thumb (struct frame_info *frame)
311 {
312 CORE_ADDR cpsr;
313 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
314
315 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
316 directly (from a signal frame or dummy frame) or by interpreting
317 the saved LR (from a prologue or DWARF frame). So consult it and
318 trust the unwinders. */
319 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
320
321 return (cpsr & t_bit) != 0;
322 }
323
/* Callback for VEC_lower_bound.  Orders mapping symbols by their
   section-relative VALUE; returns nonzero when LHS sorts strictly
   before RHS.  */

static inline int
arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
			     const struct arm_mapping_symbol *rhs)
{
  return lhs->value < rhs->value;
}
332
/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      /* Mapping symbols are stored section-relative, so rebase
	 MEMADDR before searching.  */
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
					    0 };
      unsigned int idx;

      data = (struct arm_per_objfile *) objfile_data (sec->objfile,
						      arm_objfile_data_key);
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	    {
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		    {
		      if (start)
			*start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;
		    }
		}

	      /* Fall back to the preceding symbol, which must start
		 before MEMADDR and therefore covers it.  */
	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		  if (start)
		    *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;
		}
	    }
	}
    }

  /* No section, no per-objfile data, or MEMADDR precedes the first
     mapping symbol of its section.  */
  return 0;
}
392
/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.  Returns
   nonzero for Thumb, zero for ARM.

   The checks below form a priority cascade: displaced-stepping
   redirection, the Thumb address bit, user overrides, M-profile,
   mapping symbols, minimal-symbol flags, the fallback override, and
   finally the live CPSR — order matters.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  arm_displaced_step_closure *dsc
    = ((arm_displaced_step_closure * )
	get_displaced_step_closure_by_addr (memaddr));

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}
459
460 /* Determine if the address specified equals any of these magic return
461 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
462 architectures.
463
464 From ARMv6-M Reference Manual B1.5.8
465 Table B1-5 Exception return behavior
466
467 EXC_RETURN Return To Return Stack
468 0xFFFFFFF1 Handler mode Main
469 0xFFFFFFF9 Thread mode Main
470 0xFFFFFFFD Thread mode Process
471
472 From ARMv7-M Reference Manual B1.5.8
473 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
474
475 EXC_RETURN Return To Return Stack
476 0xFFFFFFF1 Handler mode Main
477 0xFFFFFFF9 Thread mode Main
478 0xFFFFFFFD Thread mode Process
479
480 Table B1-9 EXC_RETURN definition of exception return behavior, with
481 FP
482
483 EXC_RETURN Return To Return Stack Frame Type
484 0xFFFFFFE1 Handler mode Main Extended
485 0xFFFFFFE9 Thread mode Main Extended
486 0xFFFFFFED Thread mode Process Extended
487 0xFFFFFFF1 Handler mode Main Basic
488 0xFFFFFFF9 Thread mode Main Basic
489 0xFFFFFFFD Thread mode Process Basic
490
491 For more details see "B1.5.8 Exception return behavior"
492 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
493
494 static int
495 arm_m_addr_is_magic (CORE_ADDR addr)
496 {
497 switch (addr)
498 {
499 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
500 the exception return behavior. */
501 case 0xffffffe1:
502 case 0xffffffe9:
503 case 0xffffffed:
504 case 0xfffffff1:
505 case 0xfffffff9:
506 case 0xfffffffd:
507 /* Address is magic. */
508 return 1;
509
510 default:
511 /* Address is not magic. */
512 return 0;
513 }
514 }
515
516 /* Remove useless bits from addresses in a running program. */
517 static CORE_ADDR
518 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
519 {
520 /* On M-profile devices, do not strip the low bit from EXC_RETURN
521 (the magic exception return address). */
522 if (gdbarch_tdep (gdbarch)->is_m
523 && arm_m_addr_is_magic (val))
524 return val;
525
526 if (arm_apcs_32)
527 return UNMAKE_THUMB_ADDR (val);
528 else
529 return (val & 0x03fffffc);
530 }
531
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  Skip the stub's "__" prefix so the
	 prefix checks below see the target function's own name.  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_code_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  /* Not a recognized helper; the caller must not skip it.  */
  return 0;
}
585
586 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
587 the first 16-bit of instruction, and INSN2 is the second 16-bit of
588 instruction. */
589 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
590 ((bits ((insn1), 0, 3) << 12) \
591 | (bits ((insn1), 10, 10) << 11) \
592 | (bits ((insn2), 12, 14) << 8) \
593 | bits ((insn2), 0, 7))
594
595 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
596 the 32-bit instruction. */
597 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
598 ((bits ((insn), 16, 19) << 12) \
599 | bits ((insn), 0, 11))
600
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.
   IMM is the 12-bit modified immediate field (i:imm3:imm8); the
   result is the expanded 32-bit constant.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int rotation = imm >> 7;	/* Top five bits: i:imm3:a.  */
  unsigned int byte = imm & 0xff;

  /* Rotations of 8 or more: a 1bcdefg pattern rotated right by
     ROTATION, i.e. shifted left by (32 - ROTATION).  */
  if (rotation >= 8)
    return (0x80 | (imm & 0x7f)) << (32 - rotation);

  /* Otherwise one of the four fixed replication patterns, selected
     by the top two bits of ROTATION.  */
  if (rotation < 2)		/* 00000000 00000000 00000000 abcdefgh */
    return byte;
  if (rotation < 4)		/* 00000000 abcdefgh 00000000 abcdefgh */
    return byte | (byte << 16);
  if (rotation < 6)		/* abcdefgh 00000000 abcdefgh 00000000 */
    return (byte << 8) | (byte << 24);

  /* abcdefgh abcdefgh abcdefgh abcdefgh */
  return byte | (byte << 8) | (byte << 16) | (byte << 24);
}
624
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  if (insn == 0x46bd)			/* mov sp, r7 */
    return 1;
  if ((insn & 0xff80) == 0xb000)	/* add sp, imm */
    return 1;
  /* pop <registers> */
  return (insn & 0xfe00) == 0xbc00;
}
635
/* Analyze a Thumb prologue, looking for a recognizable stack frame
   and frame pointer.  Scan until we encounter a store that could
   clobber the stack frame unexpectedly, or an unknown instruction.
   Return the last address which is definitely safe to skip for an
   initial breakpoint.

   The scan covers [START, LIMIT).  If CACHE is non-NULL, also fill in
   its frame register, frame size, and saved-register stack offsets as
   determined by symbolically executing the prologue.  */

static CORE_ADDR
thumb_analyze_prologue (struct gdbarch *gdbarch,
			CORE_ADDR start, CORE_ADDR limit,
			struct arm_prologue_cache *cache)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int i;
  pv_t regs[16];
  CORE_ADDR offset;
  CORE_ADDR unrecognized_pc = 0;

  /* Track each core register as a prologue value, starting from its
     symbolic value at function entry.  */
  for (i = 0; i < 16; i++)
    regs[i] = pv_register (i, 0);
  pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));

  while (start < limit)
    {
      unsigned short insn;

      insn = read_code_unsigned_integer (start, 2, byte_order_for_code);

      if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */
	{
	  int regno;
	  int mask;

	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
	    break;

	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
	     whether to save LR (R14).  */
	  mask = (insn & 0xff) | ((insn & 0x100) << 6);

	  /* Calculate offsets of saved R0-R7 and LR.  */
	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
	      {
		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						       -4);
		stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
	      }
	}
      else if ((insn & 0xff80) == 0xb080)	/* sub sp, #imm */
	{
	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						 -offset);
	}
      else if (thumb_instruction_restores_sp (insn))
	{
	  /* Don't scan past the epilogue.  */
	  break;
	}
      else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
						    (insn & 0xff) << 2);
      else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
						   bits (insn, 6, 8));
      else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
						    bits (insn, 0, 7));
      else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
	       && pv_is_constant (regs[bits (insn, 3, 5)]))
	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
					  regs[bits (insn, 6, 8)]);
      else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
	       && pv_is_constant (regs[bits (insn, 3, 6)]))
	{
	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
	  int rm = bits (insn, 3, 6);
	  regs[rd] = pv_add (regs[rd], regs[rm]);
	}
      else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */
	{
	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
	  int src_reg = (insn & 0x78) >> 3;
	  regs[dst_reg] = regs[src_reg];
	}
      else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */
	{
	  /* Handle stores to the stack.  Normally pushes are used,
	     but with GCC -mtpcs-frame, there may be other stores
	     in the prologue to create the frame.  */
	  int regno = (insn >> 8) & 0x7;
	  pv_t addr;

	  offset = (insn & 0xff) << 2;
	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);

	  if (stack.store_would_trash (addr))
	    break;

	  stack.store (addr, 4, regs[regno]);
	}
      else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */
	{
	  int rd = bits (insn, 0, 2);
	  int rn = bits (insn, 3, 5);
	  pv_t addr;

	  offset = bits (insn, 6, 10) << 2;
	  addr = pv_add_constant (regs[rn], offset);

	  if (stack.store_would_trash (addr))
	    break;

	  stack.store (addr, 4, regs[rd]);
	}
      else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	/* Ignore stores of argument registers to the stack.  */
	;
      else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
	;
      else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [Rn, #immed] */
	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [sp, #immed] */
		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
	/* Similarly ignore single loads from the stack.  */
	;
      else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
	/* Skip register copies, i.e. saves to another register
	   instead of the stack.  */
	;
      else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
	/* Recognize constant loads; even with small stacks these are necessary
	   on Thumb.  */
	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
      else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */
	{
	  /* Constant pool loads, for the same reason.  */
	  unsigned int constant;
	  CORE_ADDR loc;

	  loc = start + 4 + bits (insn, 0, 7) * 4;
	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
	  regs[bits (insn, 8, 10)] = pv_constant (constant);
	}
      else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions.  */
	{
	  unsigned short inst2;

	  inst2 = read_code_unsigned_integer (start + 2, 2,
					      byte_order_for_code);

	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
	    {
	      /* BL, BLX.  Allow some special function calls when
		 skipping the prologue; GCC generates these before
		 storing arguments to the stack.  */
	      CORE_ADDR nextpc;
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (insn, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = start + 4 + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;

	      /* Stop unless the call target is a known compiler
		 helper that is safe to skip.  */
	      if (!skip_prologue_function (gdbarch, nextpc,
					   bit (inst2, 12) != 0))
		break;
	    }

	  else if ((insn & 0xffd0) == 0xe900    /* stmdb Rn{!},
						   { registers } */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      pv_t addr = regs[bits (insn, 0, 3)];
	      int regno;

	      if (stack.store_would_trash (addr))
		break;

	      /* Calculate offsets of saved registers.  */
	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
		if (inst2 & (1 << regno))
		  {
		    addr = pv_add_constant (addr, -4);
		    stack.store (addr, 4, regs[regno]);
		  }

	      if (insn & 0x0020)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2,
						   [Rn, #+/-imm]{!} */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno1 = bits (inst2, 12, 15);
	      int regno2 = bits (inst2, 8, 11);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
	      if (insn & 0x0080)
		addr = pv_add_constant (addr, offset);
	      else
		addr = pv_add_constant (addr, -offset);

	      if (stack.store_would_trash (addr))
		break;

	      stack.store (addr, 4, regs[regno1]);
	      stack.store (pv_add_constant (addr, 4),
			   4, regs[regno2]);

	      if (insn & 0x0020)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
		   && (inst2 & 0x0c00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno = bits (inst2, 12, 15);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
	      if (inst2 & 0x0200)
		addr = pv_add_constant (addr, offset);
	      else
		addr = pv_add_constant (addr, -offset);

	      if (stack.store_would_trash (addr))
		break;

	      stack.store (addr, 4, regs[regno]);

	      if (inst2 & 0x0100)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno = bits (inst2, 12, 15);
	      pv_t addr;

	      offset = inst2 & 0xfff;
	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);

	      if (stack.store_would_trash (addr))
		break;

	      stack.store (addr, 4, regs[regno]);
	    }

	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	    ;

	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	    ;

	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!],
						   { registers } */
		   && (inst2 & 0x8000) == 0x0000
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore block loads from the stack, potentially copying
	       parameters from memory.  */
	    ;

	  else if ((insn & 0xffb0) == 0xe950	/* ldrd Rt, Rt2,
						   [Rn, #+/-imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore dual loads from the stack.  */
	    ;

	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	    ;

	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	    ;

	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], imm);
	    }

	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   - (CORE_ADDR) thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
	    }

	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_constant (thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	    {
	      unsigned int imm
		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);

	      regs[bits (inst2, 8, 11)] = pv_constant (imm);
	    }

	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
		   && (inst2 & 0xf0f0) == 0)
	    {
	      int dst_reg = (inst2 & 0x0f00) >> 8;
	      int src_reg = inst2 & 0xf;
	      regs[dst_reg] = regs[src_reg];
	    }

	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */
	    {
	      /* Constant pool loads.  */
	      unsigned int constant;
	      CORE_ADDR loc;

	      offset = bits (inst2, 0, 11);
	      if (insn & 0x0080)
		loc = start + 4 + offset;
	      else
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
	    }

	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */
	    {
	      /* Constant pool loads.  */
	      unsigned int constant;
	      CORE_ADDR loc;

	      offset = bits (inst2, 0, 7) << 2;
	      if (insn & 0x0080)
		loc = start + 4 + offset;
	      else
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);

	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
	      regs[bits (inst2, 8, 11)] = pv_constant (constant);
	    }

	  else if (thumb2_instruction_changes_pc (insn, inst2))
	    {
	      /* Don't scan past anything that might change control flow.  */
	      break;
	    }
	  else
	    {
	      /* The optimizer might shove anything into the prologue,
		 so we just skip what we don't recognize.  */
	      unrecognized_pc = start;
	    }

	  /* Account for the second halfword of a 32-bit instruction;
	     the loop footer below adds the other two bytes.  */
	  start += 2;
	}
      else if (thumb_instruction_changes_pc (insn))
	{
	  /* Don't scan past anything that might change control flow.  */
	  break;
	}
      else
	{
	  /* The optimizer might shove anything into the prologue,
	     so we just skip what we don't recognize.  */
	  unrecognized_pc = start;
	}

      start += 2;
    }

  if (arm_debug)
    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, start));

  /* If every instruction was recognized, the whole scanned range is
     safe to skip.  */
  if (unrecognized_pc == 0)
    unrecognized_pc = start;

  if (cache == NULL)
    return unrecognized_pc;

  if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
    {
      /* Frame pointer is fp.  Frame size is constant.  */
      cache->framereg = ARM_FP_REGNUM;
      cache->framesize = -regs[ARM_FP_REGNUM].k;
    }
  else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
    {
      /* Frame pointer is r7.  Frame size is constant.  */
      cache->framereg = THUMB_FP_REGNUM;
      cache->framesize = -regs[THUMB_FP_REGNUM].k;
    }
  else
    {
      /* Try the stack pointer... this is a bit desperate.  */
      cache->framereg = ARM_SP_REGNUM;
      cache->framesize = -regs[ARM_SP_REGNUM].k;
    }

  /* Record the stack offset of every register the scan saw saved.  */
  for (i = 0; i < 16; i++)
    if (stack.find_reg (gdbarch, i, &offset))
      cache->saved_regs[i].addr = offset;

  return unrecognized_pc;
}
1113
1114
1115 /* Try to analyze the instructions starting from PC, which load symbol
1116 __stack_chk_guard. Return the address of instruction after loading this
1117 symbol, set the dest register number to *BASEREG, and set the size of
1118 instructions for loading symbol in OFFSET. Return 0 if instructions are
1119 not recognized. */
1120
static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  /* ADDRESS stays 0 unless one of the recognized sequences below
     matches; 0 is the "not recognized" return value.  */
  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_code_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
	{
	  /* PC-relative literal load, encoding T1: Rd in bits 8..10,
	     word offset in bits 0..7; the Thumb PC base is aligned
	     down to a multiple of 4.  */
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  /* Dereference the literal pool slot to obtain the symbol's
	     address.  */
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  insn1
	    = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      /* A movw/movt pair builds the 32-bit address directly.
		 The sequence is only accepted when the movt
		 immediately follows the movw.  */
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_code_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
	{
	  /* NOTE(review): the U bit (bit 23) is not checked by the
	     mask, so a subtractive literal load would be mis-decoded
	     as additive here — presumably the compiler only emits the
	     add form in this sequence; confirm.  */
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
	    {
	      /* movw/movt pair (A32): combine halves into the full
		 32-bit address.  */
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}
1198
1199 /* Try to skip a sequence of instructions used for stack protector. If PC
1200 points to the first instruction of this sequence, return the address of
1201 first instruction after this sequence, otherwise, return original PC.
1202
1203 On arm, this sequence of instructions is composed of mainly three steps,
1204 Step 1: load symbol __stack_chk_guard,
1205 Step 2: load from address of __stack_chk_guard,
1206 Step 3: store it to somewhere else.
1207
1208 Usually, instructions on step 2 and step 3 are the same on various ARM
1209 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1210 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1211 instructions in step 1 vary from different ARM architectures. On ARMv7,
1212 they are,
1213
1214 movw Rn, #:lower16:__stack_chk_guard
1215 movt Rn, #:upper16:__stack_chk_guard
1216
1217 On ARMv5t, it is,
1218
1219 ldr Rn, .Label
1220 ....
   .Label:
1222 .word __stack_chk_guard
1223
1224 Since ldr/str is a very popular instruction, we can't use them as
1225 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1226 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */
1228
static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  On success ADDR is the
     address the sequence loads and OFFSET the byte size of the
     recognized instructions.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      /* The load must go through the register step 1 left the guard's
	 address in.  */
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_code_unsigned_integer (pc + offset + 2, 2,
					 byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      /* The store must write the guard value just loaded.  */
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      /* The load base must be the register step 1 loaded.  */
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_code_unsigned_integer (pc + offset + 4,
					 4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
1300
1301 /* Advance the PC across any function entry prologue instructions to
1302 reach some "real" code.
1303
1304 The APCS (ARM Procedure Call Standard) defines the following
1305 prologue:
1306
1307 mov ip, sp
1308 [stmfd sp!, {a1,a2,a3,a4}]
1309 stmfd sp!, {...,fp,ip,lr,pc}
1310 [stfe f7, [sp, #-12]!]
1311 [stfe f6, [sp, #-12]!]
1312 [stfe f5, [sp, #-12]!]
1313 [stfe f4, [sp, #-12]!]
1314 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1315
static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      /* A stack-protector guard sequence may follow the prologue
	 proper; skip past it as well.  */
      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);


      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	  && (cust == NULL
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
	      || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  /* If the analyzer did not recognize everything up to the
	     line-table boundary, fall back to the function entry so
	     that no user code is skipped.  */
	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;          /* Magic.  */


  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
  else
    return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
}
1393
1394 /* *INDENT-OFF* */
1395 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1396 This function decodes a Thumb function prologue to determine:
1397 1) the size of the stack frame
1398 2) which registers are saved on it
1399 3) the offsets of saved regs
1400 4) the offset from the stack pointer to the frame pointer
1401
1402 A typical Thumb function prologue would create this stack frame
1403 (offsets relative to FP)
1404 old SP -> 24 stack parameters
1405 20 LR
1406 16 R7
1407 R7 -> 0 local variables (16 bytes)
1408 SP -> -12 additional stack space (12 bytes)
1409 The frame size would thus be 36 bytes, and the frame offset would be
1410 12 bytes. The frame register is R7.
1411
1412 The comments for thumb_skip_prolog() describe the algorithm we use
1413 to detect the end of the prolog. */
1414 /* *INDENT-ON* */
1415
1416 static void
1417 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1418 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1419 {
1420 CORE_ADDR prologue_start;
1421 CORE_ADDR prologue_end;
1422
1423 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1424 &prologue_end))
1425 {
1426 /* See comment in arm_scan_prologue for an explanation of
1427 this heuristics. */
1428 if (prologue_end > prologue_start + 64)
1429 {
1430 prologue_end = prologue_start + 64;
1431 }
1432 }
1433 else
1434 /* We're in the boondocks: we have no idea where the start of the
1435 function is. */
1436 return;
1437
1438 prologue_end = std::min (prologue_end, prev_pc);
1439
1440 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1441 }
1442
1443 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1444 otherwise. */
1445
1446 static int
1447 arm_instruction_restores_sp (unsigned int insn)
1448 {
1449 if (bits (insn, 28, 31) != INST_NV)
1450 {
1451 if ((insn & 0x0df0f000) == 0x0080d000
1452 /* ADD SP (register or immediate). */
1453 || (insn & 0x0df0f000) == 0x0040d000
1454 /* SUB SP (register or immediate). */
1455 || (insn & 0x0ffffff0) == 0x01a0d000
1456 /* MOV SP. */
1457 || (insn & 0x0fff0000) == 0x08bd0000
1458 /* POP (LDMIA). */
1459 || (insn & 0x0fff0000) == 0x049d0000)
1460 /* POP of a single register. */
1461 return 1;
1462 }
1463
1464 return 0;
1465 }
1466
1467 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1468 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1469 fill it in. Return the first address not recognized as a prologue
1470 instruction.
1471
1472 We recognize all the instructions typically found in ARM prologues,
1473 plus harmless instructions which can be skipped (either for analysis
1474 purposes, or a more restrictive set that can be skipped when finding
1475 the end of the prologue). */
1476
1477 static CORE_ADDR
1478 arm_analyze_prologue (struct gdbarch *gdbarch,
1479 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1480 struct arm_prologue_cache *cache)
1481 {
1482 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1483 int regno;
1484 CORE_ADDR offset, current_pc;
1485 pv_t regs[ARM_FPS_REGNUM];
1486 CORE_ADDR unrecognized_pc = 0;
1487
1488 /* Search the prologue looking for instructions that set up the
1489 frame pointer, adjust the stack pointer, and save registers.
1490
1491 Be careful, however, and if it doesn't look like a prologue,
1492 don't try to scan it. If, for instance, a frameless function
1493 begins with stmfd sp!, then we will tell ourselves there is
1494 a frame, which will confuse stack traceback, as well as "finish"
1495 and other operations that rely on a knowledge of the stack
1496 traceback. */
1497
1498 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1499 regs[regno] = pv_register (regno, 0);
1500 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1501
1502 for (current_pc = prologue_start;
1503 current_pc < prologue_end;
1504 current_pc += 4)
1505 {
1506 unsigned int insn
1507 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1508
1509 if (insn == 0xe1a0c00d) /* mov ip, sp */
1510 {
1511 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1512 continue;
1513 }
1514 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1515 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1516 {
1517 unsigned imm = insn & 0xff; /* immediate value */
1518 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1519 int rd = bits (insn, 12, 15);
1520 imm = (imm >> rot) | (imm << (32 - rot));
1521 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1522 continue;
1523 }
1524 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1525 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1526 {
1527 unsigned imm = insn & 0xff; /* immediate value */
1528 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1529 int rd = bits (insn, 12, 15);
1530 imm = (imm >> rot) | (imm << (32 - rot));
1531 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1532 continue;
1533 }
1534 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1535 [sp, #-4]! */
1536 {
1537 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1538 break;
1539 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1540 stack.store (regs[ARM_SP_REGNUM], 4,
1541 regs[bits (insn, 12, 15)]);
1542 continue;
1543 }
1544 else if ((insn & 0xffff0000) == 0xe92d0000)
1545 /* stmfd sp!, {..., fp, ip, lr, pc}
1546 or
1547 stmfd sp!, {a1, a2, a3, a4} */
1548 {
1549 int mask = insn & 0xffff;
1550
1551 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1552 break;
1553
1554 /* Calculate offsets of saved registers. */
1555 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1556 if (mask & (1 << regno))
1557 {
1558 regs[ARM_SP_REGNUM]
1559 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1560 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1561 }
1562 }
1563 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1564 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1565 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1566 {
1567 /* No need to add this to saved_regs -- it's just an arg reg. */
1568 continue;
1569 }
1570 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1571 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1572 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1573 {
1574 /* No need to add this to saved_regs -- it's just an arg reg. */
1575 continue;
1576 }
1577 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1578 { registers } */
1579 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1580 {
1581 /* No need to add this to saved_regs -- it's just arg regs. */
1582 continue;
1583 }
1584 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1585 {
1586 unsigned imm = insn & 0xff; /* immediate value */
1587 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1588 imm = (imm >> rot) | (imm << (32 - rot));
1589 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1590 }
1591 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1592 {
1593 unsigned imm = insn & 0xff; /* immediate value */
1594 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1595 imm = (imm >> rot) | (imm << (32 - rot));
1596 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1597 }
1598 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1599 [sp, -#c]! */
1600 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1601 {
1602 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1603 break;
1604
1605 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1606 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1607 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1608 }
1609 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1610 [sp!] */
1611 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1612 {
1613 int n_saved_fp_regs;
1614 unsigned int fp_start_reg, fp_bound_reg;
1615
1616 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1617 break;
1618
1619 if ((insn & 0x800) == 0x800) /* N0 is set */
1620 {
1621 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1622 n_saved_fp_regs = 3;
1623 else
1624 n_saved_fp_regs = 1;
1625 }
1626 else
1627 {
1628 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1629 n_saved_fp_regs = 2;
1630 else
1631 n_saved_fp_regs = 4;
1632 }
1633
1634 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1635 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1636 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1637 {
1638 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1639 stack.store (regs[ARM_SP_REGNUM], 12,
1640 regs[fp_start_reg++]);
1641 }
1642 }
1643 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1644 {
1645 /* Allow some special function calls when skipping the
1646 prologue; GCC generates these before storing arguments to
1647 the stack. */
1648 CORE_ADDR dest = BranchDest (current_pc, insn);
1649
1650 if (skip_prologue_function (gdbarch, dest, 0))
1651 continue;
1652 else
1653 break;
1654 }
1655 else if ((insn & 0xf0000000) != 0xe0000000)
1656 break; /* Condition not true, exit early. */
1657 else if (arm_instruction_changes_pc (insn))
1658 /* Don't scan past anything that might change control flow. */
1659 break;
1660 else if (arm_instruction_restores_sp (insn))
1661 {
1662 /* Don't scan past the epilogue. */
1663 break;
1664 }
1665 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1666 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1667 /* Ignore block loads from the stack, potentially copying
1668 parameters from memory. */
1669 continue;
1670 else if ((insn & 0xfc500000) == 0xe4100000
1671 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1672 /* Similarly ignore single loads from the stack. */
1673 continue;
1674 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1675 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1676 register instead of the stack. */
1677 continue;
1678 else
1679 {
1680 /* The optimizer might shove anything into the prologue, if
1681 we build up cache (cache != NULL) from scanning prologue,
1682 we just skip what we don't recognize and scan further to
1683 make cache as complete as possible. However, if we skip
1684 prologue, we'll stop immediately on unrecognized
1685 instruction. */
1686 unrecognized_pc = current_pc;
1687 if (cache != NULL)
1688 continue;
1689 else
1690 break;
1691 }
1692 }
1693
1694 if (unrecognized_pc == 0)
1695 unrecognized_pc = current_pc;
1696
1697 if (cache)
1698 {
1699 int framereg, framesize;
1700
1701 /* The frame size is just the distance from the frame register
1702 to the original stack pointer. */
1703 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1704 {
1705 /* Frame pointer is fp. */
1706 framereg = ARM_FP_REGNUM;
1707 framesize = -regs[ARM_FP_REGNUM].k;
1708 }
1709 else
1710 {
1711 /* Try the stack pointer... this is a bit desperate. */
1712 framereg = ARM_SP_REGNUM;
1713 framesize = -regs[ARM_SP_REGNUM].k;
1714 }
1715
1716 cache->framereg = framereg;
1717 cache->framesize = framesize;
1718
1719 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1720 if (stack.find_reg (gdbarch, regno, &offset))
1721 cache->saved_regs[regno].addr = offset;
1722 }
1723
1724 if (arm_debug)
1725 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1726 paddress (gdbarch, unrecognized_pc));
1727
1728 return unrecognized_pc;
1729 }
1730
/* Scan the prologue of the function containing THIS_FRAME's PC and
   fill CACHE with the frame register, frame size, and the offsets of
   saved registers.  Delegates to thumb_scan_prologue for Thumb frames
   and arm_analyze_prologue for ARM frames.  */

static void
arm_scan_prologue (struct frame_info *this_frame,
		   struct arm_prologue_cache *cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  CORE_ADDR prologue_start, prologue_end;
  CORE_ADDR prev_pc = get_frame_pc (this_frame);
  CORE_ADDR block_addr = get_frame_address_in_block (this_frame);

  /* Assume there is no frame until proven otherwise.  */
  cache->framereg = ARM_SP_REGNUM;
  cache->framesize = 0;

  /* Check for Thumb prologue.  */
  if (arm_frame_is_thumb (this_frame))
    {
      thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
      return;
    }

  /* Find the function prologue.  If we can't find the function in
     the symbol table, peek in the stack frame to find the PC.  */
  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* One way to find the end of the prologue (which works well
	 for unoptimized code) is to do the following:

	    struct symtab_and_line sal = find_pc_line (prologue_start, 0);

	    if (sal.line == 0)
	      prologue_end = prev_pc;
	    else if (sal.end < prologue_end)
	      prologue_end = sal.end;

	 This mechanism is very accurate so long as the optimizer
	 doesn't move any instructions from the function body into the
	 prologue.  If this happens, sal.end will be the last
	 instruction in the first hunk of prologue code just before
	 the first instruction that the scheduler has moved from
	 the body to the prologue.

	 In order to make sure that we scan all of the prologue
	 instructions, we use a slightly less accurate mechanism which
	 may scan more than necessary.  To help compensate for this
	 lack of accuracy, the prologue scanning loop below contains
	 several clauses which'll cause the loop to terminate early if
	 an implausible prologue instruction is encountered.

	 The expression

	      prologue_start + 64

	 is a suitable endpoint since it accounts for the largest
	 possible prologue plus up to five instructions inserted by
	 the scheduler.  */

      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }
  else
    {
      /* We have no symbol information.  Our only option is to assume this
	 function has a standard stack frame and the normal frame register.
	 Then, we can find the value of our frame pointer on entrance to
	 the callee (or at the present moment if this is the innermost frame).
	 The value stored there should be the address of the stmfd + 8.  */
      CORE_ADDR frame_loc;
      ULONGEST return_value;

      frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
      if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
					      &return_value))
	/* Memory at the frame-pointer slot is unreadable; give up and
	   leave the "no frame" defaults in CACHE.  */
	return;
      else
	{
	  prologue_start = gdbarch_addr_bits_remove
			     (gdbarch, return_value) - 8;
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }

  /* Never scan past the PC the frame has actually reached.  */
  if (prev_pc < prologue_end)
    prologue_end = prev_pc;

  arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}
1821
1822 static struct arm_prologue_cache *
1823 arm_make_prologue_cache (struct frame_info *this_frame)
1824 {
1825 int reg;
1826 struct arm_prologue_cache *cache;
1827 CORE_ADDR unwound_fp;
1828
1829 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1830 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1831
1832 arm_scan_prologue (this_frame, cache);
1833
1834 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1835 if (unwound_fp == 0)
1836 return cache;
1837
1838 cache->prev_sp = unwound_fp + cache->framesize;
1839
1840 /* Calculate actual addresses of saved registers using offsets
1841 determined by arm_scan_prologue. */
1842 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1843 if (trad_frame_addr_p (cache->saved_regs, reg))
1844 cache->saved_regs[reg].addr += cache->prev_sp;
1845
1846 return cache;
1847 }
1848
1849 /* Implementation of the stop_reason hook for arm_prologue frames. */
1850
1851 static enum unwind_stop_reason
1852 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1853 void **this_cache)
1854 {
1855 struct arm_prologue_cache *cache;
1856 CORE_ADDR pc;
1857
1858 if (*this_cache == NULL)
1859 *this_cache = arm_make_prologue_cache (this_frame);
1860 cache = (struct arm_prologue_cache *) *this_cache;
1861
1862 /* This is meant to halt the backtrace at "_start". */
1863 pc = get_frame_pc (this_frame);
1864 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1865 return UNWIND_OUTERMOST;
1866
1867 /* If we've hit a wall, stop. */
1868 if (cache->prev_sp == 0)
1869 return UNWIND_OUTERMOST;
1870
1871 return UNWIND_NO_REASON;
1872 }
1873
1874 /* Our frame ID for a normal frame is the current function's starting PC
1875 and the caller's SP when we were called. */
1876
1877 static void
1878 arm_prologue_this_id (struct frame_info *this_frame,
1879 void **this_cache,
1880 struct frame_id *this_id)
1881 {
1882 struct arm_prologue_cache *cache;
1883 struct frame_id id;
1884 CORE_ADDR pc, func;
1885
1886 if (*this_cache == NULL)
1887 *this_cache = arm_make_prologue_cache (this_frame);
1888 cache = (struct arm_prologue_cache *) *this_cache;
1889
1890 /* Use function start address as part of the frame ID. If we cannot
1891 identify the start address (due to missing symbol information),
1892 fall back to just using the current PC. */
1893 pc = get_frame_pc (this_frame);
1894 func = get_frame_func (this_frame);
1895 if (!func)
1896 func = pc;
1897
1898 id = frame_id_build (cache->prev_sp, func);
1899 *this_id = id;
1900 }
1901
1902 static struct value *
1903 arm_prologue_prev_register (struct frame_info *this_frame,
1904 void **this_cache,
1905 int prev_regnum)
1906 {
1907 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1908 struct arm_prologue_cache *cache;
1909
1910 if (*this_cache == NULL)
1911 *this_cache = arm_make_prologue_cache (this_frame);
1912 cache = (struct arm_prologue_cache *) *this_cache;
1913
1914 /* If we are asked to unwind the PC, then we need to return the LR
1915 instead. The prologue may save PC, but it will point into this
1916 frame's prologue, not the next frame's resume location. Also
1917 strip the saved T bit. A valid LR may have the low bit set, but
1918 a valid PC never does. */
1919 if (prev_regnum == ARM_PC_REGNUM)
1920 {
1921 CORE_ADDR lr;
1922
1923 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1924 return frame_unwind_got_constant (this_frame, prev_regnum,
1925 arm_addr_bits_remove (gdbarch, lr));
1926 }
1927
1928 /* SP is generally not saved to the stack, but this frame is
1929 identified by the next frame's stack pointer at the time of the call.
1930 The value was already reconstructed into PREV_SP. */
1931 if (prev_regnum == ARM_SP_REGNUM)
1932 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1933
1934 /* The CPSR may have been changed by the call instruction and by the
1935 called function. The only bit we can reconstruct is the T bit,
1936 by checking the low bit of LR as of the call. This is a reliable
1937 indicator of Thumb-ness except for some ARM v4T pre-interworking
1938 Thumb code, which could get away with a clear low bit as long as
1939 the called function did not use bx. Guess that all other
1940 bits are unchanged; the condition flags are presumably lost,
1941 but the processor status is likely valid. */
1942 if (prev_regnum == ARM_PS_REGNUM)
1943 {
1944 CORE_ADDR lr, cpsr;
1945 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1946
1947 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1948 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1949 if (IS_THUMB_ADDR (lr))
1950 cpsr |= t_bit;
1951 else
1952 cpsr &= ~t_bit;
1953 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1954 }
1955
1956 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1957 prev_regnum);
1958 }
1959
/* Unwinder for ordinary ARM frames, driven by the prologue analysis
   in arm_make_prologue_cache / arm_scan_prologue above.  */

struct frame_unwind arm_prologue_unwind = {
  NORMAL_FRAME,
  arm_prologue_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  default_frame_sniffer
};
1968
1969 /* Maintain a list of ARM exception table entries per objfile, similar to the
1970 list of mapping symbols. We only cache entries for standard ARM-defined
1971 personality routines; the cache will contain only the frame unwinding
1972 instructions associated with the entry (not the descriptors). */
1973
/* Per-objfile registry key under which the exception table cache is
   stored.  */
static const struct objfile_data *arm_exidx_data_key;

/* One cached exception index entry.  */
struct arm_exidx_entry
{
  bfd_vma addr;		/* Address the entry applies to.  */
  gdb_byte *entry;	/* Normalized frame unwinding instructions
			   (see the comment above).  */
};
typedef struct arm_exidx_entry arm_exidx_entry_s;
DEF_VEC_O(arm_exidx_entry_s);

/* The per-objfile cache: one vector of entries per BFD section,
   indexed by section index.  */
struct arm_exidx_data
{
  VEC(arm_exidx_entry_s) **section_maps;
};
1988
1989 static void
1990 arm_exidx_data_free (struct objfile *objfile, void *arg)
1991 {
1992 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
1993 unsigned int i;
1994
1995 for (i = 0; i < objfile->obfd->section_count; i++)
1996 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
1997 }
1998
1999 static inline int
2000 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2001 const struct arm_exidx_entry *rhs)
2002 {
2003 return lhs->addr < rhs->addr;
2004 }
2005
2006 static struct obj_section *
2007 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2008 {
2009 struct obj_section *osect;
2010
2011 ALL_OBJFILE_OSECTIONS (objfile, osect)
2012 if (bfd_get_section_flags (objfile->obfd,
2013 osect->the_bfd_section) & SEC_ALLOC)
2014 {
2015 bfd_vma start, size;
2016 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2017 size = bfd_get_section_size (osect->the_bfd_section);
2018
2019 if (start <= vma && vma < start + size)
2020 return osect;
2021 }
2022
2023 return NULL;
2024 }
2025
/* Parse contents of exception table and exception index sections
   of OBJFILE, and fill in the exception table entry cache.

   For each entry that refers to a standard ARM-defined personality
   routine, extract the frame unwinding instructions (from either
   the index or the table section).  The unwinding instructions
   are normalized by:
   - extracting them from the rest of the table data
   - converting to host endianness
   - appending the implicit 0xb0 ("Finish") code

   The extracted and normalized instructions are stored for later
   retrieval by the arm_find_exidx_entry routine.  */

static void
arm_exidx_new_objfile (struct objfile *objfile)
{
  struct cleanup *cleanups;
  struct arm_exidx_data *data;
  asection *exidx, *extab;
  bfd_vma exidx_vma = 0, extab_vma = 0;
  bfd_size_type exidx_size = 0, extab_size = 0;
  gdb_byte *exidx_data = NULL, *extab_data = NULL;
  LONGEST i;

  /* If we've already touched this file, do nothing.  */
  if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
    return;
  cleanups = make_cleanup (null_cleanup, NULL);

  /* Read contents of exception table and index.  */
  exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
  if (exidx)
    {
      exidx_vma = bfd_section_vma (objfile->obfd, exidx);
      exidx_size = bfd_get_section_size (exidx);
      exidx_data = (gdb_byte *) xmalloc (exidx_size);
      make_cleanup (xfree, exidx_data);

      if (!bfd_get_section_contents (objfile->obfd, exidx,
				     exidx_data, 0, exidx_size))
	{
	  do_cleanups (cleanups);
	  return;
	}
    }

  extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
  if (extab)
    {
      extab_vma = bfd_section_vma (objfile->obfd, extab);
      extab_size = bfd_get_section_size (extab);
      extab_data = (gdb_byte *) xmalloc (extab_size);
      make_cleanup (xfree, extab_data);

      if (!bfd_get_section_contents (objfile->obfd, extab,
				     extab_data, 0, extab_size))
	{
	  do_cleanups (cleanups);
	  return;
	}
    }

  /* Allocate exception table data structure.  Attaching it to the
     objfile also marks the objfile as processed (see the check at the
     top of this function).  */
  data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
  set_objfile_data (objfile, arm_exidx_data_key, data);
  data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
				       objfile->obfd->section_count,
				       VEC(arm_exidx_entry_s) *);

  /* Fill in exception table.  Each index entry is two 32-bit words:
     a prel31 offset to the function, and either an inline unwind
     description or a prel31 offset into .ARM.extab.  */
  for (i = 0; i < exidx_size / 8; i++)
    {
      struct arm_exidx_entry new_exidx_entry;
      bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
      bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
      bfd_vma addr = 0, word = 0;
      int n_bytes = 0, n_words = 0;
      struct obj_section *sec;
      gdb_byte *entry = NULL;

      /* Extract address of start of function.  The (x ^ 0x40000000)
	 - 0x40000000 idiom sign-extends the 31-bit prel31 field.  */
      idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
      idx += exidx_vma + i * 8;

      /* Find section containing function and compute section offset.  */
      sec = arm_obj_section_from_vma (objfile, idx);
      if (sec == NULL)
	continue;
      idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);

      /* Determine address of exception table entry.  */
      if (val == 1)
	{
	  /* EXIDX_CANTUNWIND -- no exception table entry present.  */
	}
      else if ((val & 0xff000000) == 0x80000000)
	{
	  /* Exception table entry embedded in .ARM.exidx
	     -- must be short form.  */
	  word = val;
	  n_bytes = 3;
	}
      else if (!(val & 0x80000000))
	{
	  /* Exception table entry in .ARM.extab.  VAL is a prel31
	     offset relative to this index entry's second word.  */
	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
	  addr += exidx_vma + i * 8 + 4;

	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data + addr - extab_vma);
	      addr += 4;

	      if ((word & 0xff000000) == 0x80000000)
		{
		  /* Short form.  */
		  n_bytes = 3;
		}
	      else if ((word & 0xff000000) == 0x81000000
		       || (word & 0xff000000) == 0x82000000)
		{
		  /* Long form.  */
		  n_bytes = 2;
		  n_words = ((word >> 16) & 0xff);
		}
	      else if (!(word & 0x80000000))
		{
		  bfd_vma pers;
		  struct obj_section *pers_sec;
		  int gnu_personality = 0;

		  /* Custom personality routine.  */
		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);

		  /* Check whether we've got one of the variants of the
		     GNU personality routines.  */
		  pers_sec = arm_obj_section_from_vma (objfile, pers);
		  if (pers_sec)
		    {
		      static const char *personality[] =
			{
			  "__gcc_personality_v0",
			  "__gxx_personality_v0",
			  "__gcj_personality_v0",
			  "__gnu_objc_personality_v0",
			  NULL
			};

		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
		      int k;

		      for (k = 0; personality[k]; k++)
			if (lookup_minimal_symbol_by_pc_name
			    (pc, personality[k], objfile))
			  {
			    gnu_personality = 1;
			    break;
			  }
		    }

		  /* If so, the next word contains a word count in the high
		     byte, followed by the same unwind instructions as the
		     pre-defined forms.  */
		  if (gnu_personality
		      && addr + 4 <= extab_vma + extab_size)
		    {
		      word = bfd_h_get_32 (objfile->obfd,
					   extab_data + addr - extab_vma);
		      addr += 4;
		      n_bytes = 3;
		      n_words = ((word >> 24) & 0xff);
		    }
		}
	    }
	}

      /* Sanity check address.  */
      if (n_words)
	if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
	  n_words = n_bytes = 0;

      /* The unwind instructions reside in WORD (only the N_BYTES least
	 significant bytes are valid), followed by N_WORDS words in the
	 extab section starting at ADDR.  */
      if (n_bytes || n_words)
	{
	  gdb_byte *p = entry
	    = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
					  n_bytes + n_words * 4 + 1);

	  /* Emit WORD's valid bytes most-significant first; note the
	     post-decrement makes the shift count 8*(remaining-1).  */
	  while (n_bytes--)
	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);

	  while (n_words--)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data + addr - extab_vma);
	      addr += 4;

	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
	      *p++ = (gdb_byte) (word & 0xff);
	    }

	  /* Implied "Finish" to terminate the list.  */
	  *p++ = 0xb0;
	}

      /* Push entry onto vector.  They are guaranteed to always
	 appear in order of increasing addresses.  */
      new_exidx_entry.addr = idx;
      new_exidx_entry.entry = entry;
      VEC_safe_push (arm_exidx_entry_s,
		     data->section_maps[sec->the_bfd_section->index],
		     &new_exidx_entry);
    }

  do_cleanups (cleanups);
}
2249
2250 /* Search for the exception table entry covering MEMADDR. If one is found,
2251 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2252 set *START to the start of the region covered by this entry. */
2253
2254 static gdb_byte *
2255 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2256 {
2257 struct obj_section *sec;
2258
2259 sec = find_pc_section (memaddr);
2260 if (sec != NULL)
2261 {
2262 struct arm_exidx_data *data;
2263 VEC(arm_exidx_entry_s) *map;
2264 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2265 unsigned int idx;
2266
2267 data = ((struct arm_exidx_data *)
2268 objfile_data (sec->objfile, arm_exidx_data_key));
2269 if (data != NULL)
2270 {
2271 map = data->section_maps[sec->the_bfd_section->index];
2272 if (!VEC_empty (arm_exidx_entry_s, map))
2273 {
2274 struct arm_exidx_entry *map_sym;
2275
2276 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2277 arm_compare_exidx_entries);
2278
2279 /* VEC_lower_bound finds the earliest ordered insertion
2280 point. If the following symbol starts at this exact
2281 address, we use that; otherwise, the preceding
2282 exception table entry covers this address. */
2283 if (idx < VEC_length (arm_exidx_entry_s, map))
2284 {
2285 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2286 if (map_sym->addr == map_key.addr)
2287 {
2288 if (start)
2289 *start = map_sym->addr + obj_section_addr (sec);
2290 return map_sym->entry;
2291 }
2292 }
2293
2294 if (idx > 0)
2295 {
2296 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2297 if (start)
2298 *start = map_sym->addr + obj_section_addr (sec);
2299 return map_sym->entry;
2300 }
2301 }
2302 }
2303 }
2304
2305 return NULL;
2306 }
2307
/* Given the current frame THIS_FRAME, and its associated frame unwinding
   instruction list from the ARM exception table entry ENTRY, allocate and
   return a prologue cache structure describing how to unwind this frame.

   The interpreter below replays the unwind opcodes, tracking VSP (the
   virtual stack pointer) as an absolute address; registers "popped" by
   an opcode are recorded in CACHE->saved_regs with the absolute stack
   address they were saved at.

   Return NULL if the unwinding instruction list contains a "spare",
   "reserved" or "refuse to unwind" instruction as defined in section
   "9.3 Frame unwinding instructions" of the "Exception Handling ABI
   for the ARM Architecture" document.  */

static struct arm_prologue_cache *
arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
{
  CORE_ADDR vsp = 0;
  int vsp_valid = 0;

  struct arm_prologue_cache *cache;
  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  for (;;)
    {
      gdb_byte insn;

      /* Whenever we reload SP, we actually have to retrieve its
	 actual value in the current frame.  */
      if (!vsp_valid)
	{
	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
	    {
	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
	      vsp = get_frame_register_unsigned (this_frame, reg);
	    }
	  else
	    {
	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
	    }

	  vsp_valid = 1;
	}

      /* Decode next unwind instruction.  */
      insn = *entry++;

      if ((insn & 0xc0) == 0)
	{
	  /* 00xxxxxx: vsp += (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp += (offset << 2) + 4;
	}
      else if ((insn & 0xc0) == 0x40)
	{
	  /* 01xxxxxx: vsp -= (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp -= (offset << 2) + 4;
	}
      else if ((insn & 0xf0) == 0x80)
	{
	  /* 1000iiii iiiiiiii: pop r4-r15 under a 12-bit mask.  */
	  int mask = ((insn & 0xf) << 8) | *entry++;
	  int i;

	  /* The special case of an all-zero mask identifies
	     "Refuse to unwind".  We return NULL to fall back
	     to the prologue analyzer.  */
	  if (mask == 0)
	    return NULL;

	  /* Pop registers r4..r15 under mask.  */
	  for (i = 0; i < 12; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[4 + i].addr = vsp;
		vsp += 4;
	      }

	  /* Special-case popping SP -- we need to reload vsp.  */
	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
	    vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0x90)
	{
	  /* 1001nnnn: set vsp = r[nnnn].  */
	  int reg = insn & 0xf;

	  /* Reserved cases.  */
	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
	    return NULL;

	  /* Set SP from another register and mark VSP for reload.  */
	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
	  vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0xa0)
	{
	  /* 1010lnnn: pop r4-r[4+nnn], plus LR if the l bit is set.  */
	  int count = insn & 0x7;
	  int pop_lr = (insn & 0x8) != 0;
	  int i;

	  /* Pop r4..r[4+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[4 + i].addr = vsp;
	      vsp += 4;
	    }

	  /* If indicated by flag, pop LR as well.  */
	  if (pop_lr)
	    {
	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
	      vsp += 4;
	    }
	}
      else if (insn == 0xb0)
	{
	  /* 10110000: "Finish" -- end of the instruction list.  */
	  /* We could only have updated PC by popping into it; if so, it
	     will show up as address.  Otherwise, copy LR into PC.  */
	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
	    cache->saved_regs[ARM_PC_REGNUM]
	      = cache->saved_regs[ARM_LR_REGNUM];

	  /* We're done.  */
	  break;
	}
      else if (insn == 0xb1)
	{
	  /* 10110001: pop r0-r3 under mask in the next byte.  */
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop r0..r3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[i].addr = vsp;
		vsp += 4;
	      }
	}
      else if (insn == 0xb2)
	{
	  /* 10110010 uleb128: vsp += 0x204 + (uleb128 << 2).  */
	  ULONGEST offset = 0;
	  unsigned shift = 0;

	  do
	    {
	      offset |= (*entry & 0x7f) << shift;
	      shift += 7;
	    }
	  while (*entry++ & 0x80);

	  vsp += 0x204 + (offset << 2);
	}
      else if (insn == 0xb3)
	{
	  /* 10110011 sssscccc: pop VFP regs with FSTMFDX layout.  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D15 are valid here.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if ((insn & 0xf8) == 0xb8)
	{
	  /* 10111nnn: pop D8-D[8+nnn] with FSTMFDX layout.  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if (insn == 0xc6)
	{
	  /* 11000110 sssscccc: pop iWMMXt data registers.  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers WR0..WR15 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc7)
	{
	  /* 11000111 0000iiii: pop iWMMXt control registers.  */
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
		vsp += 4;
	      }
	}
      else if ((insn & 0xf8) == 0xc0)
	{
	  /* 11000nnn: pop WR10-WR[10+nnn].  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop iwmmx registers WR[10]..WR[10+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc8)
	{
	  /* 11001000 sssscccc: pop VFP regs D16 and above (FSTMFDD).  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D31 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers
	     D[16+start]..D[16+start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc9)
	{
	  /* 11001001 sssscccc: pop VFP regs with FSTMFDD layout.  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if ((insn & 0xf8) == 0xd0)
	{
	  /* 11010nnn: pop D8-D[8+nnn] with FSTMFDD layout.  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else
	{
	  /* Everything else is "spare".  */
	  return NULL;
	}
    }

  /* If we restore SP from a register, assume this was the frame register.
     Otherwise just fall back to SP as frame register.  */
  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
    cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
  else
    cache->framereg = ARM_SP_REGNUM;

  /* Determine offset to previous frame.  */
  cache->framesize
    = vsp - get_frame_register_unsigned (this_frame, cache->framereg);

  /* We already got the previous SP.  */
  cache->prev_sp = vsp;

  return cache;
}
2606
/* Unwinding via ARM exception table entries.  Note that the sniffer
   already computes a filled-in prologue cache, which is then used
   with the same arm_prologue_this_id and arm_prologue_prev_register
   routines also used for prologue-parsing based unwinding.  */

static int
arm_exidx_unwind_sniffer (const struct frame_unwind *self,
			  struct frame_info *this_frame,
			  void **this_prologue_cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR addr_in_block, exidx_region, func_start;
  struct arm_prologue_cache *cache;
  gdb_byte *entry;

  /* See if we have an ARM exception table entry covering this address.  */
  addr_in_block = get_frame_address_in_block (this_frame);
  entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
  if (!entry)
    return 0;

  /* The ARM exception table does not describe unwind information
     for arbitrary PC values, but is guaranteed to be correct only
     at call sites.  We have to decide here whether we want to use
     ARM exception table information for this frame, or fall back
     to using prologue parsing.  (Note that if we have DWARF CFI,
     this sniffer isn't even called -- CFI is always preferred.)

     Before we make this decision, however, we check whether we
     actually have *symbol* information for the current frame.
     If not, prologue parsing would not work anyway, so we might
     as well use the exception table and hope for the best.  */
  if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
    {
      int exc_valid = 0;

      /* If the next frame is "normal", we are at a call site in this
	 frame, so exception information is guaranteed to be valid.  */
      if (get_next_frame (this_frame)
	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
	exc_valid = 1;

      /* We also assume exception information is valid if we're currently
	 blocked in a system call.  The system library is supposed to
	 ensure this, so that e.g. pthread cancellation works.
	 We detect this by looking for a Thumb or ARM "svc" instruction
	 immediately preceding the current PC.  */
      if (arm_frame_is_thumb (this_frame))
	{
	  ULONGEST insn;

	  if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
						 2, byte_order_for_code, &insn)
	      && (insn & 0xff00) == 0xdf00 /* svc */)
	    exc_valid = 1;
	}
      else
	{
	  ULONGEST insn;

	  if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
						 4, byte_order_for_code, &insn)
	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
	    exc_valid = 1;
	}

      /* Bail out if we don't know that exception information is valid.  */
      if (!exc_valid)
	return 0;

      /* The ARM exception index does not mark the *end* of the region
	 covered by the entry, and some functions will not have any entry.
	 To correctly recognize the end of the covered region, the linker
	 should have inserted dummy records with a CANTUNWIND marker.

	 Unfortunately, current versions of GNU ld do not reliably do
	 this, and thus we may have found an incorrect entry above.
	 As a (temporary) sanity check, we only use the entry if it
	 lies *within* the bounds of the function.  Note that this check
	 might reject perfectly valid entries that just happen to cover
	 multiple functions; therefore this check ought to be removed
	 once the linker is fixed.  */
      if (func_start > exidx_region)
	return 0;
    }

  /* Decode the list of unwinding instructions into a prologue cache.
     Note that this may fail due to e.g. a "refuse to unwind" code.  */
  cache = arm_exidx_fill_cache (this_frame, entry);
  if (!cache)
    return 0;

  *this_prologue_cache = cache;
  return 1;
}
2701
/* Exception-table-based unwinder.  The sniffer fills in the prologue
   cache, so the prologue unwinder's this_id/prev_register callbacks
   can be shared.  */
struct frame_unwind arm_exidx_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_exidx_unwind_sniffer
};
2710
2711 static struct arm_prologue_cache *
2712 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2713 {
2714 struct arm_prologue_cache *cache;
2715 int reg;
2716
2717 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2718 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2719
2720 /* Still rely on the offset calculated from prologue. */
2721 arm_scan_prologue (this_frame, cache);
2722
2723 /* Since we are in epilogue, the SP has been restored. */
2724 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2725
2726 /* Calculate actual addresses of saved registers using offsets
2727 determined by arm_scan_prologue. */
2728 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2729 if (trad_frame_addr_p (cache->saved_regs, reg))
2730 cache->saved_regs[reg].addr += cache->prev_sp;
2731
2732 return cache;
2733 }
2734
2735 /* Implementation of function hook 'this_id' in
2736 'struct frame_uwnind' for epilogue unwinder. */
2737
2738 static void
2739 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2740 void **this_cache,
2741 struct frame_id *this_id)
2742 {
2743 struct arm_prologue_cache *cache;
2744 CORE_ADDR pc, func;
2745
2746 if (*this_cache == NULL)
2747 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2748 cache = (struct arm_prologue_cache *) *this_cache;
2749
2750 /* Use function start address as part of the frame ID. If we cannot
2751 identify the start address (due to missing symbol information),
2752 fall back to just using the current PC. */
2753 pc = get_frame_pc (this_frame);
2754 func = get_frame_func (this_frame);
2755 if (func == 0)
2756 func = pc;
2757
2758 (*this_id) = frame_id_build (cache->prev_sp, pc);
2759 }
2760
2761 /* Implementation of function hook 'prev_register' in
2762 'struct frame_uwnind' for epilogue unwinder. */
2763
2764 static struct value *
2765 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2766 void **this_cache, int regnum)
2767 {
2768 if (*this_cache == NULL)
2769 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2770
2771 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2772 }
2773
/* Forward declarations of the epilogue detectors; they are needed by
   arm_epilogue_frame_sniffer below but defined later in the file.  */
static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
					  CORE_ADDR pc);
static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
					  CORE_ADDR pc);
2778
2779 /* Implementation of function hook 'sniffer' in
2780 'struct frame_uwnind' for epilogue unwinder. */
2781
2782 static int
2783 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2784 struct frame_info *this_frame,
2785 void **this_prologue_cache)
2786 {
2787 if (frame_relative_level (this_frame) == 0)
2788 {
2789 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2790 CORE_ADDR pc = get_frame_pc (this_frame);
2791
2792 if (arm_frame_is_thumb (this_frame))
2793 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2794 else
2795 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2796 }
2797 else
2798 return 0;
2799 }
2800
/* Frame unwinder from epilogue.  Handles the innermost frame when it is
   stopped inside a function epilogue, where the prologue-based unwinder
   would miscompute the previous SP.  */

static const struct frame_unwind arm_epilogue_frame_unwind =
{
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_epilogue_frame_this_id,
  arm_epilogue_frame_prev_register,
  NULL,
  arm_epilogue_frame_sniffer,
};
2812
2813 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2814 trampoline, return the target PC. Otherwise return 0.
2815
2816 void call0a (char c, short s, int i, long l) {}
2817
2818 int main (void)
2819 {
2820 (*pointer_to_call0a) (c, s, i, l);
2821 }
2822
2823 Instead of calling a stub library function _call_via_xx (xx is
2824 the register name), GCC may inline the trampoline in the object
2825 file as below (register r2 has the address of call0a).
2826
2827 .global main
2828 .type main, %function
2829 ...
2830 bl .L1
2831 ...
2832 .size main, .-main
2833
2834 .L1:
2835 bx r2
2836
2837 The trampoline 'bx r2' doesn't belong to main. */
2838
2839 static CORE_ADDR
2840 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2841 {
2842 /* The heuristics of recognizing such trampoline is that FRAME is
2843 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2844 if (arm_frame_is_thumb (frame))
2845 {
2846 gdb_byte buf[2];
2847
2848 if (target_read_memory (pc, buf, 2) == 0)
2849 {
2850 struct gdbarch *gdbarch = get_frame_arch (frame);
2851 enum bfd_endian byte_order_for_code
2852 = gdbarch_byte_order_for_code (gdbarch);
2853 uint16_t insn
2854 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2855
2856 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2857 {
2858 CORE_ADDR dest
2859 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2860
2861 /* Clear the LSB so that gdb core sets step-resume
2862 breakpoint at the right address. */
2863 return UNMAKE_THUMB_ADDR (dest);
2864 }
2865 }
2866 }
2867
2868 return 0;
2869 }
2870
2871 static struct arm_prologue_cache *
2872 arm_make_stub_cache (struct frame_info *this_frame)
2873 {
2874 struct arm_prologue_cache *cache;
2875
2876 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2877 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2878
2879 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2880
2881 return cache;
2882 }
2883
2884 /* Our frame ID for a stub frame is the current SP and LR. */
2885
2886 static void
2887 arm_stub_this_id (struct frame_info *this_frame,
2888 void **this_cache,
2889 struct frame_id *this_id)
2890 {
2891 struct arm_prologue_cache *cache;
2892
2893 if (*this_cache == NULL)
2894 *this_cache = arm_make_stub_cache (this_frame);
2895 cache = (struct arm_prologue_cache *) *this_cache;
2896
2897 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2898 }
2899
2900 static int
2901 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2902 struct frame_info *this_frame,
2903 void **this_prologue_cache)
2904 {
2905 CORE_ADDR addr_in_block;
2906 gdb_byte dummy[4];
2907 CORE_ADDR pc, start_addr;
2908 const char *name;
2909
2910 addr_in_block = get_frame_address_in_block (this_frame);
2911 pc = get_frame_pc (this_frame);
2912 if (in_plt_section (addr_in_block)
2913 /* We also use the stub winder if the target memory is unreadable
2914 to avoid having the prologue unwinder trying to read it. */
2915 || target_read_memory (pc, dummy, 4) != 0)
2916 return 1;
2917
2918 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2919 && arm_skip_bx_reg (this_frame, pc) != 0)
2920 return 1;
2921
2922 return 0;
2923 }
2924
/* Unwinder for stub frames (PLT entries, unreadable code, inline
   trampolines); register reconstruction is shared with the prologue
   unwinder.  */
struct frame_unwind arm_stub_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_stub_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_stub_unwind_sniffer
};
2933
/* Put here the code to store, into CACHE->saved_regs, the addresses
   of the saved registers of frame described by THIS_FRAME.  CACHE is
   returned.  */

static struct arm_prologue_cache *
arm_m_exception_cache (struct frame_info *this_frame)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  struct arm_prologue_cache *cache;
  CORE_ADDR unwound_sp;
  LONGEST xpsr;

  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  /* The current SP points at the exception stack frame pushed by the
     hardware on exception entry.  */
  unwound_sp = get_frame_register_unsigned (this_frame,
					    ARM_SP_REGNUM);

  /* The hardware saves eight 32-bit words, comprising xPSR,
     ReturnAddress, LR (R14), R12, R3, R2, R1, R0.  See details in
     "B1.5.6 Exception entry behavior" in
     "ARMv7-M Architecture Reference Manual".  */
  cache->saved_regs[0].addr = unwound_sp;
  cache->saved_regs[1].addr = unwound_sp + 4;
  cache->saved_regs[2].addr = unwound_sp + 8;
  cache->saved_regs[3].addr = unwound_sp + 12;
  cache->saved_regs[12].addr = unwound_sp + 16;
  cache->saved_regs[14].addr = unwound_sp + 20;
  cache->saved_regs[15].addr = unwound_sp + 24;
  cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;

  /* If bit 9 of the saved xPSR is set, then there is a four-byte
     aligner between the top of the 32-byte stack frame and the
     previous context's stack pointer.  */
  cache->prev_sp = unwound_sp + 32;
  if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
      && (xpsr & (1 << 9)) != 0)
    cache->prev_sp += 4;

  return cache;
}
2976
2977 /* Implementation of function hook 'this_id' in
2978 'struct frame_uwnind'. */
2979
2980 static void
2981 arm_m_exception_this_id (struct frame_info *this_frame,
2982 void **this_cache,
2983 struct frame_id *this_id)
2984 {
2985 struct arm_prologue_cache *cache;
2986
2987 if (*this_cache == NULL)
2988 *this_cache = arm_m_exception_cache (this_frame);
2989 cache = (struct arm_prologue_cache *) *this_cache;
2990
2991 /* Our frame ID for a stub frame is the current SP and LR. */
2992 *this_id = frame_id_build (cache->prev_sp,
2993 get_frame_pc (this_frame));
2994 }
2995
2996 /* Implementation of function hook 'prev_register' in
2997 'struct frame_uwnind'. */
2998
2999 static struct value *
3000 arm_m_exception_prev_register (struct frame_info *this_frame,
3001 void **this_cache,
3002 int prev_regnum)
3003 {
3004 struct arm_prologue_cache *cache;
3005
3006 if (*this_cache == NULL)
3007 *this_cache = arm_m_exception_cache (this_frame);
3008 cache = (struct arm_prologue_cache *) *this_cache;
3009
3010 /* The value was already reconstructed into PREV_SP. */
3011 if (prev_regnum == ARM_SP_REGNUM)
3012 return frame_unwind_got_constant (this_frame, prev_regnum,
3013 cache->prev_sp);
3014
3015 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3016 prev_regnum);
3017 }
3018
/* Implementation of function hook 'sniffer' in
   'struct frame_unwind' for the M-profile exception unwinder.  */

static int
arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
				struct frame_info *this_frame,
				void **this_prologue_cache)
{
  /* No need to check is_m; this sniffer is only registered for
     M-profile architectures.  Claim the frame whenever execution
     "returns" to one of the magic EXC_RETURN addresses.  */
  return arm_m_addr_is_magic (get_frame_pc (this_frame));
}
3035
/* Frame unwinder for M-profile exceptions.  Registered as a
   SIGTRAMP_FRAME unwinder since the "caller" is an interrupted
   context, not an ordinary call site.  */

struct frame_unwind arm_m_exception_unwind =
{
  SIGTRAMP_FRAME,
  default_frame_unwind_stop_reason,
  arm_m_exception_this_id,
  arm_m_exception_prev_register,
  NULL,
  arm_m_exception_unwind_sniffer
};
3047
3048 static CORE_ADDR
3049 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3050 {
3051 struct arm_prologue_cache *cache;
3052
3053 if (*this_cache == NULL)
3054 *this_cache = arm_make_prologue_cache (this_frame);
3055 cache = (struct arm_prologue_cache *) *this_cache;
3056
3057 return cache->prev_sp - cache->framesize;
3058 }
3059
/* Frame base handler for normal ARM frames.  The same address is used
   as the frame base, locals base and arguments base.  */

struct frame_base arm_normal_base = {
  &arm_prologue_unwind,
  arm_normal_frame_base,
  arm_normal_frame_base,
  arm_normal_frame_base
};
3066
3067 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3068 dummy frame. The frame ID's base needs to match the TOS value
3069 saved by save_dummy_frame_tos() and returned from
3070 arm_push_dummy_call, and the PC needs to match the dummy frame's
3071 breakpoint. */
3072
3073 static struct frame_id
3074 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3075 {
3076 return frame_id_build (get_frame_register_unsigned (this_frame,
3077 ARM_SP_REGNUM),
3078 get_frame_pc (this_frame));
3079 }
3080
3081 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3082 be used to construct the previous frame's ID, after looking up the
3083 containing function). */
3084
3085 static CORE_ADDR
3086 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3087 {
3088 CORE_ADDR pc;
3089 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3090 return arm_addr_bits_remove (gdbarch, pc);
3091 }
3092
3093 static CORE_ADDR
3094 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3095 {
3096 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3097 }
3098
/* DWARF unwinder callback for the PC and CPSR registers; installed by
   arm_dwarf2_frame_init_reg below.  Any other REGNUM is an internal
   error.  */

static struct value *
arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
			  int regnum)
{
  struct gdbarch * gdbarch = get_frame_arch (this_frame);
  CORE_ADDR lr, cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

  switch (regnum)
    {
    case ARM_PC_REGNUM:
      /* The PC is normally copied from the return column, which
	 describes saves of LR.  However, that version may have an
	 extra bit set to indicate Thumb state.  The bit is not
	 part of the PC.  */
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, regnum,
					arm_addr_bits_remove (gdbarch, lr));

    case ARM_PS_REGNUM:
      /* Reconstruct the T bit; see arm_prologue_prev_register for details.
	 The saved LR's Thumb bit tells us the execution state to
	 reflect into the unwound CPSR.  */
      cpsr = get_frame_register_unsigned (this_frame, regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
	cpsr |= t_bit;
      else
	cpsr &= ~t_bit;
      return frame_unwind_got_constant (this_frame, regnum, cpsr);

    default:
      internal_error (__FILE__, __LINE__,
		      _("Unexpected register %d"), regnum);
    }
}
3133
3134 static void
3135 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3136 struct dwarf2_frame_state_reg *reg,
3137 struct frame_info *this_frame)
3138 {
3139 switch (regnum)
3140 {
3141 case ARM_PC_REGNUM:
3142 case ARM_PS_REGNUM:
3143 reg->how = DWARF2_FRAME_REG_FN;
3144 reg->loc.fn = arm_dwarf2_prev_register;
3145 break;
3146 case ARM_SP_REGNUM:
3147 reg->how = DWARF2_FRAME_REG_CFA;
3148 break;
3149 }
3150 }
3151
/* Implement the stack_frame_destroyed_p gdbarch method for Thumb code:
   return non-zero if PC appears to be inside a function epilogue.  */

static int
thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn, insn2;
  int found_return = 0, found_stack_adjust = 0;
  CORE_ADDR func_start, func_end;
  CORE_ADDR scan_pc;
  gdb_byte buf[4];

  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* The epilogue is a sequence of instructions along the following lines:

    - add stack frame size to SP or FP
    - [if frame pointer used] restore SP from FP
    - restore registers from SP [may include PC]
    - a return-type instruction [if PC wasn't already restored]

    In a first pass, we scan forward from the current PC and verify the
    instructions we find as compatible with this sequence, ending in a
    return instruction.

    However, this is not sufficient to distinguish indirect function calls
    within a function from indirect tail calls in the epilogue in some cases.
    Therefore, if we didn't already find any SP-changing instruction during
    forward scan, we add a backward scanning heuristic to ensure we actually
    are in the epilogue.  */

  /* Forward scan: every instruction from PC to the first return must
     match an epilogue pattern, otherwise bail out.  */
  scan_pc = pc;
  while (scan_pc < func_end && !found_return)
    {
      if (target_read_memory (scan_pc, buf, 2))
	break;

      scan_pc += 2;
      insn = extract_unsigned_integer (buf, 2, byte_order_for_code);

      if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
	found_return = 1;
      else if (insn == 0x46f7)  /* mov pc, lr */
	found_return = 1;
      else if (thumb_instruction_restores_sp (insn))
	{
	  if ((insn & 0xff00) == 0xbd00)  /* pop <registers, PC> */
	    found_return = 1;
	}
      else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
	{
	  /* Fetch the second halfword of the 32-bit instruction.  */
	  if (target_read_memory (scan_pc, buf, 2))
	    break;

	  scan_pc += 2;
	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);

	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
	    {
	      if (insn2 & 0x8000)  /* <registers> include PC.  */
		found_return = 1;
	    }
	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
		   && (insn2 & 0x0fff) == 0x0b04)
	    {
	      if ((insn2 & 0xf000) == 0xf000)  /* <Rt> is PC.  */
		found_return = 1;
	    }
	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
		   && (insn2 & 0x0e00) == 0x0a00)
	    ;	/* VFP register restore: compatible, keep scanning.  */
	  else
	    break;
	}
      else
	break;
    }

  if (!found_return)
    return 0;

  /* Since any instruction in the epilogue sequence, with the possible
     exception of return itself, updates the stack pointer, we need to
     scan backwards for at most one instruction.  Try either a 16-bit or
     a 32-bit instruction.  This is just a heuristic, so we do not worry
     too much about false positives.  */

  if (pc - 4 < func_start)
    return 0;
  if (target_read_memory (pc - 4, buf, 4))
    return 0;

  /* INSN2 alone is the candidate 16-bit instruction; INSN+INSN2
     together form the candidate 32-bit instruction.  */
  insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
  insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);

  if (thumb_instruction_restores_sp (insn2))
    found_stack_adjust = 1;
  else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
    found_stack_adjust = 1;
  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
	   && (insn2 & 0x0fff) == 0x0b04)
    found_stack_adjust = 1;
  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
	   && (insn2 & 0x0e00) == 0x0a00)
    found_stack_adjust = 1;

  return found_stack_adjust;
}
3261
/* Helper for arm_stack_frame_destroyed_p: the 32-bit ARM instruction
   set case.  Return non-zero if PC appears to be in a function
   epilogue.  */

static int
arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn;
  int found_return;
  CORE_ADDR func_start, func_end;

  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* We are in the epilogue if the previous instruction was a stack
     adjustment and the next instruction is a possible return (bx, mov
     pc, or pop).  We could have to scan backwards to find the stack
     adjustment, or forwards to find the return, but this is a decent
     approximation.  First scan forwards.  */

  found_return = 0;
  insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
  /* Skip unconditional (NV-space) encodings; they are not returns.  */
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0ffffff0) == 0x012fff10)
	/* BX.  */
	found_return = 1;
      else if ((insn & 0x0ffffff0) == 0x01a0f000)
	/* MOV PC.  */
	found_return = 1;
      else if ((insn & 0x0fff0000) == 0x08bd0000
	       && (insn & 0x0000c000) != 0)
	/* POP (LDMIA), including PC or LR.  */
	found_return = 1;
    }

  if (!found_return)
    return 0;

  /* Scan backwards.  This is just a heuristic, so do not worry about
     false positives from mode changes.  */

  if (pc < func_start + 4)
    return 0;

  insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
  if (arm_instruction_restores_sp (insn))
    return 1;

  return 0;
}
3310
3311 /* Implement the stack_frame_destroyed_p gdbarch method. */
3312
3313 static int
3314 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3315 {
3316 if (arm_pc_is_thumb (gdbarch, pc))
3317 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3318 else
3319 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3320 }
3321
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.
   Each node owns a heap-allocated copy of its argument bytes.  */

struct stack_item
{
  int len;			/* Number of bytes in DATA.  */
  struct stack_item *prev;	/* Next item further down the stack.  */
  gdb_byte *data;		/* Owned copy of the argument bytes.  */
};
3331
3332 static struct stack_item *
3333 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3334 {
3335 struct stack_item *si;
3336 si = XNEW (struct stack_item);
3337 si->data = (gdb_byte *) xmalloc (len);
3338 si->len = len;
3339 si->prev = prev;
3340 memcpy (si->data, contents, len);
3341 return si;
3342 }
3343
3344 static struct stack_item *
3345 pop_stack_item (struct stack_item *si)
3346 {
3347 struct stack_item *dead = si;
3348 si = si->prev;
3349 xfree (dead->data);
3350 xfree (dead);
3351 return si;
3352 }
3353
3354
3355 /* Return the alignment (in bytes) of the given type. */
3356
3357 static int
3358 arm_type_align (struct type *t)
3359 {
3360 int n;
3361 int align;
3362 int falign;
3363
3364 t = check_typedef (t);
3365 switch (TYPE_CODE (t))
3366 {
3367 default:
3368 /* Should never happen. */
3369 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3370 return 4;
3371
3372 case TYPE_CODE_PTR:
3373 case TYPE_CODE_ENUM:
3374 case TYPE_CODE_INT:
3375 case TYPE_CODE_FLT:
3376 case TYPE_CODE_SET:
3377 case TYPE_CODE_RANGE:
3378 case TYPE_CODE_REF:
3379 case TYPE_CODE_RVALUE_REF:
3380 case TYPE_CODE_CHAR:
3381 case TYPE_CODE_BOOL:
3382 return TYPE_LENGTH (t);
3383
3384 case TYPE_CODE_ARRAY:
3385 if (TYPE_VECTOR (t))
3386 {
3387 /* Use the natural alignment for vector types (the same for
3388 scalar type), but the maximum alignment is 64-bit. */
3389 if (TYPE_LENGTH (t) > 8)
3390 return 8;
3391 else
3392 return TYPE_LENGTH (t);
3393 }
3394 else
3395 return arm_type_align (TYPE_TARGET_TYPE (t));
3396 case TYPE_CODE_COMPLEX:
3397 return arm_type_align (TYPE_TARGET_TYPE (t));
3398
3399 case TYPE_CODE_STRUCT:
3400 case TYPE_CODE_UNION:
3401 align = 1;
3402 for (n = 0; n < TYPE_NFIELDS (t); n++)
3403 {
3404 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3405 if (falign > align)
3406 align = falign;
3407 }
3408 return align;
3409 }
3410 }
3411
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,	/* Not (yet) classified.  */
  VFP_CPRC_SINGLE,	/* 4-byte single-precision float.  */
  VFP_CPRC_DOUBLE,	/* 8-byte double-precision float.  */
  VFP_CPRC_VEC64,	/* 64-bit containerized vector.  */
  VFP_CPRC_VEC128	/* 128-bit containerized vector.  */
};
3423
3424 /* The length of one element of base type B. */
3425
3426 static unsigned
3427 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3428 {
3429 switch (b)
3430 {
3431 case VFP_CPRC_SINGLE:
3432 return 4;
3433 case VFP_CPRC_DOUBLE:
3434 return 8;
3435 case VFP_CPRC_VEC64:
3436 return 8;
3437 case VFP_CPRC_VEC128:
3438 return 16;
3439 default:
3440 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3441 (int) b);
3442 }
3443 }
3444
3445 /* The character ('s', 'd' or 'q') for the type of VFP register used
3446 for passing base type B. */
3447
3448 static int
3449 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3450 {
3451 switch (b)
3452 {
3453 case VFP_CPRC_SINGLE:
3454 return 's';
3455 case VFP_CPRC_DOUBLE:
3456 return 'd';
3457 case VFP_CPRC_VEC64:
3458 return 'd';
3459 case VFP_CPRC_VEC128:
3460 return 'q';
3461 default:
3462 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3463 (int) b);
3464 }
3465 }
3466
/* Determine whether T may be part of a candidate for passing and
   returning in VFP registers, ignoring the limit on the total number
   of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
   classification of the first valid component found; if it is not
   VFP_CPRC_UNKNOWN, all components must have the same classification
   as *BASE_TYPE.  If it is found that T contains a type not permitted
   for passing and returning in VFP registers, a type differently
   classified from *BASE_TYPE, or two types differently classified
   from each other, return -1, otherwise return the total number of
   base-type elements found (possibly 0 in an empty structure or
   array).  Vector types are not currently supported, matching the
   generic AAPCS support.  */

static int
arm_vfp_cprc_sub_candidate (struct type *t,
			    enum arm_vfp_cprc_base_type *base_type)
{
  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    case TYPE_CODE_FLT:
      /* A plain float: classify by size (4 = single, 8 = double).  */
      switch (TYPE_LENGTH (t))
	{
	case 4:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 1;

	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 1;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_COMPLEX:
      /* Arguments of complex T where T is one of the types float or
	 double get treated as if they are implemented as:

	 struct complexT
	 {
	   T real;
	   T imag;
	 };

      */
      switch (TYPE_LENGTH (t))
	{
	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 2;

	case 16:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 2;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_ARRAY:
      {
	if (TYPE_VECTOR (t))
	  {
	    /* A 64-bit or 128-bit containerized vector type are VFP
	       CPRCs.  */
	    switch (TYPE_LENGTH (t))
	      {
	      case 8:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC64;
		return 1;
	      case 16:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC128;
		return 1;
	      default:
		return -1;
	      }
	  }
	else
	  {
	    /* An ordinary array: classify the element type, then the
	       element count is total length / unit length.  */
	    int count;
	    unsigned unitlen;

	    count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
						base_type);
	    if (count == -1)
	      return -1;
	    if (TYPE_LENGTH (t) == 0)
	      {
		gdb_assert (count == 0);
		return 0;
	      }
	    else if (count == 0)
	      return -1;
	    unitlen = arm_vfp_cprc_unit_length (*base_type);
	    gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
	    return TYPE_LENGTH (t) / unitlen;
	  }
      }
      break;

    case TYPE_CODE_STRUCT:
      {
	/* A struct's element count is the sum over its non-static
	   fields; padding (length != unitlen * count) disqualifies.  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = 0;

	    if (!field_is_static (&TYPE_FIELD (t, i)))
	      sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
						      base_type);
	    if (sub_count == -1)
	      return -1;
	    count += sub_count;
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    case TYPE_CODE_UNION:
      {
	/* A union's element count is the maximum over its members.  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count = (count > sub_count ? count : sub_count);
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    default:
      break;
    }

  return -1;
}
3645
3646 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3647 if passed to or returned from a non-variadic function with the VFP
3648 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3649 *BASE_TYPE to the base type for T and *COUNT to the number of
3650 elements of that base type before returning. */
3651
3652 static int
3653 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3654 int *count)
3655 {
3656 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3657 int c = arm_vfp_cprc_sub_candidate (t, &b);
3658 if (c <= 0 || c > 4)
3659 return 0;
3660 *base_type = b;
3661 *count = c;
3662 return 1;
3663 }
3664
3665 /* Return 1 if the VFP ABI should be used for passing arguments to and
3666 returning values from a function of type FUNC_TYPE, 0
3667 otherwise. */
3668
3669 static int
3670 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3671 {
3672 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3673 /* Variadic functions always use the base ABI. Assume that functions
3674 without debug info are not variadic. */
3675 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3676 return 0;
3677 /* The VFP ABI is only supported as a variant of AAPCS. */
3678 if (tdep->arm_abi != ARM_ABI_AAPCS)
3679 return 0;
3680 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3681 }
3682
3683 /* We currently only support passing parameters in integer registers, which
3684 conforms with GCC's default model, and VFP argument passing following
3685 the VFP variant of AAPCS. Several other variants exist and
3686 we should probably support some of them based on the selected ABI. */
3687
3688 static CORE_ADDR
3689 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3690 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3691 struct value **args, CORE_ADDR sp, int struct_return,
3692 CORE_ADDR struct_addr)
3693 {
3694 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3695 int argnum;
3696 int argreg;
3697 int nstack;
3698 struct stack_item *si = NULL;
3699 int use_vfp_abi;
3700 struct type *ftype;
3701 unsigned vfp_regs_free = (1 << 16) - 1;
3702
3703 /* Determine the type of this function and whether the VFP ABI
3704 applies. */
3705 ftype = check_typedef (value_type (function));
3706 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3707 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3708 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3709
3710 /* Set the return address. For the ARM, the return breakpoint is
3711 always at BP_ADDR. */
3712 if (arm_pc_is_thumb (gdbarch, bp_addr))
3713 bp_addr |= 1;
3714 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3715
3716 /* Walk through the list of args and determine how large a temporary
3717 stack is required. Need to take care here as structs may be
3718 passed on the stack, and we have to push them. */
3719 nstack = 0;
3720
3721 argreg = ARM_A1_REGNUM;
3722 nstack = 0;
3723
3724 /* The struct_return pointer occupies the first parameter
3725 passing register. */
3726 if (struct_return)
3727 {
3728 if (arm_debug)
3729 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3730 gdbarch_register_name (gdbarch, argreg),
3731 paddress (gdbarch, struct_addr));
3732 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3733 argreg++;
3734 }
3735
3736 for (argnum = 0; argnum < nargs; argnum++)
3737 {
3738 int len;
3739 struct type *arg_type;
3740 struct type *target_type;
3741 enum type_code typecode;
3742 const bfd_byte *val;
3743 int align;
3744 enum arm_vfp_cprc_base_type vfp_base_type;
3745 int vfp_base_count;
3746 int may_use_core_reg = 1;
3747
3748 arg_type = check_typedef (value_type (args[argnum]));
3749 len = TYPE_LENGTH (arg_type);
3750 target_type = TYPE_TARGET_TYPE (arg_type);
3751 typecode = TYPE_CODE (arg_type);
3752 val = value_contents (args[argnum]);
3753
3754 align = arm_type_align (arg_type);
3755 /* Round alignment up to a whole number of words. */
3756 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3757 /* Different ABIs have different maximum alignments. */
3758 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3759 {
3760 /* The APCS ABI only requires word alignment. */
3761 align = INT_REGISTER_SIZE;
3762 }
3763 else
3764 {
3765 /* The AAPCS requires at most doubleword alignment. */
3766 if (align > INT_REGISTER_SIZE * 2)
3767 align = INT_REGISTER_SIZE * 2;
3768 }
3769
3770 if (use_vfp_abi
3771 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3772 &vfp_base_count))
3773 {
3774 int regno;
3775 int unit_length;
3776 int shift;
3777 unsigned mask;
3778
3779 /* Because this is a CPRC it cannot go in a core register or
3780 cause a core register to be skipped for alignment.
3781 Either it goes in VFP registers and the rest of this loop
3782 iteration is skipped for this argument, or it goes on the
3783 stack (and the stack alignment code is correct for this
3784 case). */
3785 may_use_core_reg = 0;
3786
3787 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3788 shift = unit_length / 4;
3789 mask = (1 << (shift * vfp_base_count)) - 1;
3790 for (regno = 0; regno < 16; regno += shift)
3791 if (((vfp_regs_free >> regno) & mask) == mask)
3792 break;
3793
3794 if (regno < 16)
3795 {
3796 int reg_char;
3797 int reg_scaled;
3798 int i;
3799
3800 vfp_regs_free &= ~(mask << regno);
3801 reg_scaled = regno / shift;
3802 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3803 for (i = 0; i < vfp_base_count; i++)
3804 {
3805 char name_buf[4];
3806 int regnum;
3807 if (reg_char == 'q')
3808 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3809 val + i * unit_length);
3810 else
3811 {
3812 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3813 reg_char, reg_scaled + i);
3814 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3815 strlen (name_buf));
3816 regcache_cooked_write (regcache, regnum,
3817 val + i * unit_length);
3818 }
3819 }
3820 continue;
3821 }
3822 else
3823 {
3824 /* This CPRC could not go in VFP registers, so all VFP
3825 registers are now marked as used. */
3826 vfp_regs_free = 0;
3827 }
3828 }
3829
3830 /* Push stack padding for dowubleword alignment. */
3831 if (nstack & (align - 1))
3832 {
3833 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3834 nstack += INT_REGISTER_SIZE;
3835 }
3836
3837 /* Doubleword aligned quantities must go in even register pairs. */
3838 if (may_use_core_reg
3839 && argreg <= ARM_LAST_ARG_REGNUM
3840 && align > INT_REGISTER_SIZE
3841 && argreg & 1)
3842 argreg++;
3843
3844 /* If the argument is a pointer to a function, and it is a
3845 Thumb function, create a LOCAL copy of the value and set
3846 the THUMB bit in it. */
3847 if (TYPE_CODE_PTR == typecode
3848 && target_type != NULL
3849 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3850 {
3851 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3852 if (arm_pc_is_thumb (gdbarch, regval))
3853 {
3854 bfd_byte *copy = (bfd_byte *) alloca (len);
3855 store_unsigned_integer (copy, len, byte_order,
3856 MAKE_THUMB_ADDR (regval));
3857 val = copy;
3858 }
3859 }
3860
3861 /* Copy the argument to general registers or the stack in
3862 register-sized pieces. Large arguments are split between
3863 registers and stack. */
3864 while (len > 0)
3865 {
3866 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3867 CORE_ADDR regval
3868 = extract_unsigned_integer (val, partial_len, byte_order);
3869
3870 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3871 {
3872 /* The argument is being passed in a general purpose
3873 register. */
3874 if (byte_order == BFD_ENDIAN_BIG)
3875 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3876 if (arm_debug)
3877 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3878 argnum,
3879 gdbarch_register_name
3880 (gdbarch, argreg),
3881 phex (regval, INT_REGISTER_SIZE));
3882 regcache_cooked_write_unsigned (regcache, argreg, regval);
3883 argreg++;
3884 }
3885 else
3886 {
3887 gdb_byte buf[INT_REGISTER_SIZE];
3888
3889 memset (buf, 0, sizeof (buf));
3890 store_unsigned_integer (buf, partial_len, byte_order, regval);
3891
3892 /* Push the arguments onto the stack. */
3893 if (arm_debug)
3894 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3895 argnum, nstack);
3896 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3897 nstack += INT_REGISTER_SIZE;
3898 }
3899
3900 len -= partial_len;
3901 val += partial_len;
3902 }
3903 }
3904 /* If we have an odd number of words to push, then decrement the stack
3905 by one word now, so first stack argument will be dword aligned. */
3906 if (nstack & 4)
3907 sp -= 4;
3908
3909 while (si)
3910 {
3911 sp -= si->len;
3912 write_memory (sp, si->data, si->len);
3913 si = pop_stack_item (si);
3914 }
3915
3916 /* Finally, update teh SP register. */
3917 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3918
3919 return sp;
3920 }
3921
3922
3923 /* Always align the frame to an 8-byte boundary. This is required on
3924 some platforms and harmless on the rest. */
3925
3926 static CORE_ADDR
3927 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3928 {
3929 /* Align the stack to eight bytes. */
3930 return sp & ~ (CORE_ADDR) 7;
3931 }
3932
/* Print to FILE the names of the FPU status bits set in the low five
   bits of FLAGS (IVO, DVZ, OFL, UFL, INX), followed by a newline.  */

static void
print_fpu_flags (struct ui_file *file, int flags)
{
  static const char *const flag_names[]
    = { "IVO ", "DVZ ", "OFL ", "UFL ", "INX " };

  for (int bit = 0; bit < 5; bit++)
    if (flags & (1 << bit))
      fputs_filtered (flag_names[bit], file);
  fputc_filtered ('\n', file);
}
3948
3949 /* Print interesting information about the floating point processor
3950 (if present) or emulator. */
3951 static void
3952 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3953 struct frame_info *frame, const char *args)
3954 {
3955 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3956 int type;
3957
3958 type = (status >> 24) & 127;
3959 if (status & (1 << 31))
3960 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3961 else
3962 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3963 /* i18n: [floating point unit] mask */
3964 fputs_filtered (_("mask: "), file);
3965 print_fpu_flags (file, status >> 16);
3966 /* i18n: [floating point unit] flags */
3967 fputs_filtered (_("flags: "), file);
3968 print_fpu_flags (file, status);
3969 }
3970
3971 /* Construct the ARM extended floating point type. */
3972 static struct type *
3973 arm_ext_type (struct gdbarch *gdbarch)
3974 {
3975 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3976
3977 if (!tdep->arm_ext_type)
3978 tdep->arm_ext_type
3979 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3980 floatformats_arm_ext);
3981
3982 return tdep->arm_ext_type;
3983 }
3984
3985 static struct type *
3986 arm_neon_double_type (struct gdbarch *gdbarch)
3987 {
3988 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3989
3990 if (tdep->neon_double_type == NULL)
3991 {
3992 struct type *t, *elem;
3993
3994 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3995 TYPE_CODE_UNION);
3996 elem = builtin_type (gdbarch)->builtin_uint8;
3997 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3998 elem = builtin_type (gdbarch)->builtin_uint16;
3999 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4000 elem = builtin_type (gdbarch)->builtin_uint32;
4001 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4002 elem = builtin_type (gdbarch)->builtin_uint64;
4003 append_composite_type_field (t, "u64", elem);
4004 elem = builtin_type (gdbarch)->builtin_float;
4005 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4006 elem = builtin_type (gdbarch)->builtin_double;
4007 append_composite_type_field (t, "f64", elem);
4008
4009 TYPE_VECTOR (t) = 1;
4010 TYPE_NAME (t) = "neon_d";
4011 tdep->neon_double_type = t;
4012 }
4013
4014 return tdep->neon_double_type;
4015 }
4016
4017 /* FIXME: The vector types are not correctly ordered on big-endian
4018 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4019 bits of d0 - regardless of what unit size is being held in d0. So
4020 the offset of the first uint8 in d0 is 7, but the offset of the
4021 first float is 4. This code works as-is for little-endian
4022 targets. */
4023
4024 static struct type *
4025 arm_neon_quad_type (struct gdbarch *gdbarch)
4026 {
4027 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4028
4029 if (tdep->neon_quad_type == NULL)
4030 {
4031 struct type *t, *elem;
4032
4033 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4034 TYPE_CODE_UNION);
4035 elem = builtin_type (gdbarch)->builtin_uint8;
4036 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4037 elem = builtin_type (gdbarch)->builtin_uint16;
4038 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4039 elem = builtin_type (gdbarch)->builtin_uint32;
4040 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4041 elem = builtin_type (gdbarch)->builtin_uint64;
4042 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4043 elem = builtin_type (gdbarch)->builtin_float;
4044 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4045 elem = builtin_type (gdbarch)->builtin_double;
4046 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4047
4048 TYPE_VECTOR (t) = 1;
4049 TYPE_NAME (t) = "neon_q";
4050 tdep->neon_quad_type = t;
4051 }
4052
4053 return tdep->neon_quad_type;
4054 }
4055
/* Return the GDB type object for the "standard" data type of data in
   register N.  */

static struct type *
arm_register_type (struct gdbarch *gdbarch, int regnum)
{
  int num_regs = gdbarch_num_regs (gdbarch);

  /* The 32 pseudo registers following the raw set are single-precision
     VFP views.  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && regnum >= num_regs && regnum < num_regs + 32)
    return builtin_type (gdbarch)->builtin_float;

  /* The next 16 pseudo registers are quad-word NEON views.  */
  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
    return arm_neon_quad_type (gdbarch);

  /* If the target description has register information, we are only
     in this function so that we can override the types of
     double-precision registers for NEON.  */
  if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
    {
      struct type *t = tdesc_register_type (gdbarch, regnum);

      if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
	  && TYPE_CODE (t) == TYPE_CODE_FLT
	  && gdbarch_tdep (gdbarch)->have_neon)
	return arm_neon_double_type (gdbarch);
      else
	return t;
    }

  if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
    {
      /* FPA registers are meaningless without an FPA; report void.  */
      if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
	return builtin_type (gdbarch)->builtin_void;

      return arm_ext_type (gdbarch);
    }
  else if (regnum == ARM_SP_REGNUM)
    return builtin_type (gdbarch)->builtin_data_ptr;
  else if (regnum == ARM_PC_REGNUM)
    return builtin_type (gdbarch)->builtin_func_ptr;
  else if (regnum >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return builtin_type (gdbarch)->builtin_int0;
  else
    return builtin_type (gdbarch)->builtin_uint32;
}
4105
4106 /* Map a DWARF register REGNUM onto the appropriate GDB register
4107 number. */
4108
4109 static int
4110 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4111 {
4112 /* Core integer regs. */
4113 if (reg >= 0 && reg <= 15)
4114 return reg;
4115
4116 /* Legacy FPA encoding. These were once used in a way which
4117 overlapped with VFP register numbering, so their use is
4118 discouraged, but GDB doesn't support the ARM toolchain
4119 which used them for VFP. */
4120 if (reg >= 16 && reg <= 23)
4121 return ARM_F0_REGNUM + reg - 16;
4122
4123 /* New assignments for the FPA registers. */
4124 if (reg >= 96 && reg <= 103)
4125 return ARM_F0_REGNUM + reg - 96;
4126
4127 /* WMMX register assignments. */
4128 if (reg >= 104 && reg <= 111)
4129 return ARM_WCGR0_REGNUM + reg - 104;
4130
4131 if (reg >= 112 && reg <= 127)
4132 return ARM_WR0_REGNUM + reg - 112;
4133
4134 if (reg >= 192 && reg <= 199)
4135 return ARM_WC0_REGNUM + reg - 192;
4136
4137 /* VFP v2 registers. A double precision value is actually
4138 in d1 rather than s2, but the ABI only defines numbering
4139 for the single precision registers. This will "just work"
4140 in GDB for little endian targets (we'll read eight bytes,
4141 starting in s0 and then progressing to s1), but will be
4142 reversed on big endian targets with VFP. This won't
4143 be a problem for the new Neon quad registers; you're supposed
4144 to use DW_OP_piece for those. */
4145 if (reg >= 64 && reg <= 95)
4146 {
4147 char name_buf[4];
4148
4149 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4150 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4151 strlen (name_buf));
4152 }
4153
4154 /* VFP v3 / Neon registers. This range is also used for VFP v2
4155 registers, except that it now describes d0 instead of s0. */
4156 if (reg >= 256 && reg <= 287)
4157 {
4158 char name_buf[4];
4159
4160 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4161 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4162 strlen (name_buf));
4163 }
4164
4165 return -1;
4166 }
4167
4168 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4169 static int
4170 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4171 {
4172 int reg = regnum;
4173 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4174
4175 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4176 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4177
4178 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4179 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4180
4181 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4182 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4183
4184 if (reg < NUM_GREGS)
4185 return SIM_ARM_R0_REGNUM + reg;
4186 reg -= NUM_GREGS;
4187
4188 if (reg < NUM_FREGS)
4189 return SIM_ARM_FP0_REGNUM + reg;
4190 reg -= NUM_FREGS;
4191
4192 if (reg < NUM_SREGS)
4193 return SIM_ARM_FPS_REGNUM + reg;
4194 reg -= NUM_SREGS;
4195
4196 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4197 }
4198
4199 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4200 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4201 NULL if an error occurs. BUF is freed. */
4202
4203 static gdb_byte *
4204 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4205 int old_len, int new_len)
4206 {
4207 gdb_byte *new_buf;
4208 int bytes_to_read = new_len - old_len;
4209
4210 new_buf = (gdb_byte *) xmalloc (new_len);
4211 memcpy (new_buf + bytes_to_read, buf, old_len);
4212 xfree (buf);
4213 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4214 {
4215 xfree (new_buf);
4216 return NULL;
4217 }
4218 return new_buf;
4219 }
4220
4221 /* An IT block is at most the 2-byte IT instruction followed by
4222 four 4-byte instructions. The furthest back we must search to
4223 find an IT block that affects the current instruction is thus
4224 2 + 3 * 4 == 14 bytes. */
4225 #define MAX_IT_BLOCK_PREFIX 14
4226
4227 /* Use a quick scan if there are more than this many bytes of
4228 code. */
4229 #define IT_SCAN_THRESHOLD 32
4230
4231 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4232 A breakpoint in an IT block may not be hit, depending on the
4233 condition flags. */
4234 static CORE_ADDR
4235 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4236 {
4237 gdb_byte *buf;
4238 char map_type;
4239 CORE_ADDR boundary, func_start;
4240 int buf_len;
4241 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4242 int i, any, last_it, last_it_count;
4243
4244 /* If we are using BKPT breakpoints, none of this is necessary. */
4245 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4246 return bpaddr;
4247
4248 /* ARM mode does not have this problem. */
4249 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4250 return bpaddr;
4251
4252 /* We are setting a breakpoint in Thumb code that could potentially
4253 contain an IT block. The first step is to find how much Thumb
4254 code there is; we do not need to read outside of known Thumb
4255 sequences. */
4256 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4257 if (map_type == 0)
4258 /* Thumb-2 code must have mapping symbols to have a chance. */
4259 return bpaddr;
4260
4261 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4262
4263 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4264 && func_start > boundary)
4265 boundary = func_start;
4266
4267 /* Search for a candidate IT instruction. We have to do some fancy
4268 footwork to distinguish a real IT instruction from the second
4269 half of a 32-bit instruction, but there is no need for that if
4270 there's no candidate. */
4271 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4272 if (buf_len == 0)
4273 /* No room for an IT instruction. */
4274 return bpaddr;
4275
4276 buf = (gdb_byte *) xmalloc (buf_len);
4277 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4278 return bpaddr;
4279 any = 0;
4280 for (i = 0; i < buf_len; i += 2)
4281 {
4282 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4283 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4284 {
4285 any = 1;
4286 break;
4287 }
4288 }
4289
4290 if (any == 0)
4291 {
4292 xfree (buf);
4293 return bpaddr;
4294 }
4295
4296 /* OK, the code bytes before this instruction contain at least one
4297 halfword which resembles an IT instruction. We know that it's
4298 Thumb code, but there are still two possibilities. Either the
4299 halfword really is an IT instruction, or it is the second half of
4300 a 32-bit Thumb instruction. The only way we can tell is to
4301 scan forwards from a known instruction boundary. */
4302 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4303 {
4304 int definite;
4305
4306 /* There's a lot of code before this instruction. Start with an
4307 optimistic search; it's easy to recognize halfwords that can
4308 not be the start of a 32-bit instruction, and use that to
4309 lock on to the instruction boundaries. */
4310 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4311 if (buf == NULL)
4312 return bpaddr;
4313 buf_len = IT_SCAN_THRESHOLD;
4314
4315 definite = 0;
4316 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4317 {
4318 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4319 if (thumb_insn_size (inst1) == 2)
4320 {
4321 definite = 1;
4322 break;
4323 }
4324 }
4325
4326 /* At this point, if DEFINITE, BUF[I] is the first place we
4327 are sure that we know the instruction boundaries, and it is far
4328 enough from BPADDR that we could not miss an IT instruction
4329 affecting BPADDR. If ! DEFINITE, give up - start from a
4330 known boundary. */
4331 if (! definite)
4332 {
4333 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4334 bpaddr - boundary);
4335 if (buf == NULL)
4336 return bpaddr;
4337 buf_len = bpaddr - boundary;
4338 i = 0;
4339 }
4340 }
4341 else
4342 {
4343 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4344 if (buf == NULL)
4345 return bpaddr;
4346 buf_len = bpaddr - boundary;
4347 i = 0;
4348 }
4349
4350 /* Scan forwards. Find the last IT instruction before BPADDR. */
4351 last_it = -1;
4352 last_it_count = 0;
4353 while (i < buf_len)
4354 {
4355 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4356 last_it_count--;
4357 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4358 {
4359 last_it = i;
4360 if (inst1 & 0x0001)
4361 last_it_count = 4;
4362 else if (inst1 & 0x0002)
4363 last_it_count = 3;
4364 else if (inst1 & 0x0004)
4365 last_it_count = 2;
4366 else
4367 last_it_count = 1;
4368 }
4369 i += thumb_insn_size (inst1);
4370 }
4371
4372 xfree (buf);
4373
4374 if (last_it == -1)
4375 /* There wasn't really an IT instruction after all. */
4376 return bpaddr;
4377
4378 if (last_it_count < 1)
4379 /* It was too far away. */
4380 return bpaddr;
4381
4382 /* This really is a trouble spot. Move the breakpoint to the IT
4383 instruction. */
4384 return bpaddr - buf_len + last_it;
4385 }
4386
4387 /* ARM displaced stepping support.
4388
4389 Generally ARM displaced stepping works as follows:
4390
4391 1. When an instruction is to be single-stepped, it is first decoded by
4392 arm_process_displaced_insn. Depending on the type of instruction, it is
4393 then copied to a scratch location, possibly in a modified form. The
4394 copy_* set of functions performs such modification, as necessary. A
4395 breakpoint is placed after the modified instruction in the scratch space
4396 to return control to GDB. Note in particular that instructions which
4397 modify the PC will no longer do so after modification.
4398
4399 2. The instruction is single-stepped, by setting the PC to the scratch
4400 location address, and resuming. Control returns to GDB when the
4401 breakpoint is hit.
4402
4403 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4404 function used for the current instruction. This function's job is to
4405 put the CPU/memory state back to what it would have been if the
4406 instruction had been executed unmodified in its original location. */
4407
4408 /* NOP instruction (mov r0, r0). */
4409 #define ARM_NOP 0xe1a00000
4410 #define THUMB_NOP 0x4600
4411
4412 /* Helper for register reads for displaced stepping. In particular, this
4413 returns the PC as it would be seen by the instruction at its original
4414 location. */
4415
4416 ULONGEST
4417 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4418 int regno)
4419 {
4420 ULONGEST ret;
4421 CORE_ADDR from = dsc->insn_addr;
4422
4423 if (regno == ARM_PC_REGNUM)
4424 {
4425 /* Compute pipeline offset:
4426 - When executing an ARM instruction, PC reads as the address of the
4427 current instruction plus 8.
4428 - When executing a Thumb instruction, PC reads as the address of the
4429 current instruction plus 4. */
4430
4431 if (!dsc->is_thumb)
4432 from += 8;
4433 else
4434 from += 4;
4435
4436 if (debug_displaced)
4437 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4438 (unsigned long) from);
4439 return (ULONGEST) from;
4440 }
4441 else
4442 {
4443 regcache_cooked_read_unsigned (regs, regno, &ret);
4444 if (debug_displaced)
4445 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4446 regno, (unsigned long) ret);
4447 return ret;
4448 }
4449 }
4450
4451 static int
4452 displaced_in_arm_mode (struct regcache *regs)
4453 {
4454 ULONGEST ps;
4455 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4456
4457 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4458
4459 return (ps & t_bit) == 0;
4460 }
4461
4462 /* Write to the PC as from a branch instruction. */
4463
4464 static void
4465 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4466 ULONGEST val)
4467 {
4468 if (!dsc->is_thumb)
4469 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4470 architecture versions < 6. */
4471 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4472 val & ~(ULONGEST) 0x3);
4473 else
4474 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4475 val & ~(ULONGEST) 0x1);
4476 }
4477
4478 /* Write to the PC as from a branch-exchange instruction. */
4479
4480 static void
4481 bx_write_pc (struct regcache *regs, ULONGEST val)
4482 {
4483 ULONGEST ps;
4484 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4485
4486 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4487
4488 if ((val & 1) == 1)
4489 {
4490 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4491 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4492 }
4493 else if ((val & 2) == 0)
4494 {
4495 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4496 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4497 }
4498 else
4499 {
4500 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4501 mode, align dest to 4 bytes). */
4502 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4503 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4504 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4505 }
4506 }
4507
4508 /* Write to the PC as if from a load instruction. */
4509
4510 static void
4511 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4512 ULONGEST val)
4513 {
4514 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4515 bx_write_pc (regs, val);
4516 else
4517 branch_write_pc (regs, dsc, val);
4518 }
4519
4520 /* Write to the PC as if from an ALU instruction. */
4521
4522 static void
4523 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4524 ULONGEST val)
4525 {
4526 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4527 bx_write_pc (regs, val);
4528 else
4529 branch_write_pc (regs, dsc, val);
4530 }
4531
4532 /* Helper for writing to registers for displaced stepping. Writing to the PC
4533 has a varying effects depending on the instruction which does the write:
4534 this is controlled by the WRITE_PC argument. */
4535
4536 void
4537 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4538 int regno, ULONGEST val, enum pc_write_style write_pc)
4539 {
4540 if (regno == ARM_PC_REGNUM)
4541 {
4542 if (debug_displaced)
4543 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4544 (unsigned long) val);
4545 switch (write_pc)
4546 {
4547 case BRANCH_WRITE_PC:
4548 branch_write_pc (regs, dsc, val);
4549 break;
4550
4551 case BX_WRITE_PC:
4552 bx_write_pc (regs, val);
4553 break;
4554
4555 case LOAD_WRITE_PC:
4556 load_write_pc (regs, dsc, val);
4557 break;
4558
4559 case ALU_WRITE_PC:
4560 alu_write_pc (regs, dsc, val);
4561 break;
4562
4563 case CANNOT_WRITE_PC:
4564 warning (_("Instruction wrote to PC in an unexpected way when "
4565 "single-stepping"));
4566 break;
4567
4568 default:
4569 internal_error (__FILE__, __LINE__,
4570 _("Invalid argument to displaced_write_reg"));
4571 }
4572
4573 dsc->wrote_to_pc = 1;
4574 }
4575 else
4576 {
4577 if (debug_displaced)
4578 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4579 regno, (unsigned long) val);
4580 regcache_cooked_write_unsigned (regs, regno, val);
4581 }
4582 }
4583
/* This function is used to concisely determine if an instruction INSN
   references PC.  Register fields of interest in INSN should have the
   corresponding fields of BITMASK set to 0b1111.  The function
   returns 1 if any of these fields in INSN reference the PC
   (also 0b1111, r15), else it returns 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t remaining = bitmask;

  while (remaining != 0)
    {
      /* Isolate the lowest set bit; the register field of interest
	 occupies the four bits starting there.  */
      uint32_t low = remaining & -remaining;
      uint32_t field = low * 0xf;

      /* All four bits set in INSN means the field encodes r15.  */
      if ((insn & field) == field)
	return 1;

      remaining &= ~field;
    }

  return 0;
}
4615
/* The simplest copy function.  Many instructions have the same effect no
   matter what address they are executed at: in those cases, use this.

   INSN is placed in the scratch area verbatim; INAME names the
   opcode/class for the debug trace only.  Always returns 0.  */

static int
arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
		     const char *iname, arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
			"opcode/class '%s' unmodified\n", (unsigned long) insn,
			iname);

  /* Execute the instruction in the scratch space exactly as-is.  */
  dsc->modinsn[0] = insn;

  return 0;
}
4632
4633 static int
4634 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4635 uint16_t insn2, const char *iname,
4636 arm_displaced_step_closure *dsc)
4637 {
4638 if (debug_displaced)
4639 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4640 "opcode/class '%s' unmodified\n", insn1, insn2,
4641 iname);
4642
4643 dsc->modinsn[0] = insn1;
4644 dsc->modinsn[1] = insn2;
4645 dsc->numinsns = 2;
4646
4647 return 0;
4648 }
4649
/* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
   modification.

   INSN is placed in the scratch area verbatim; INAME names the
   opcode/class for the debug trace only.  Always returns 0.  */
static int
thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
			     const char *iname,
			     arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
			"opcode/class '%s' unmodified\n", insn,
			iname);

  dsc->modinsn[0] = insn;

  return 0;
}
4666
/* Preload instructions with immediate offset.  */

/* Cleanup: restore the registers the install_preload* routines
   borrowed — r0 always, and r1 too when the register-offset form was
   used (u.preload.immed == 0).  */

static void
cleanup_preload (struct gdbarch *gdbarch,
		 struct regcache *regs, arm_displaced_step_closure *dsc)
{
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (!dsc->u.preload.immed)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
}
4677
/* Set up state for a displaced {pli,pld} with immediate offset: move
   base register RN's value into r0 (saving the old r0 in tmp[0] for
   cleanup_preload to restore).  The caller has already rewritten the
   copied instruction to use r0 as its base.  */

static void
install_preload (struct gdbarch *gdbarch, struct regcache *regs,
		 arm_displaced_step_closure *dsc, unsigned int rn)
{
  ULONGEST rn_val;
  /* Preload instructions:

     {pli/pld} [rn, #+/-imm]
     ->
     {pli/pld} [r0, #+/-imm].  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  rn_val = displaced_read_reg (regs, dsc, rn);
  displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
  /* Immediate form: cleanup only needs to restore r0.  */
  dsc->u.preload.immed = 1;

  dsc->cleanup = &cleanup_preload;
}
4696
/* Copy an ARM {pli,pld} with immediate offset for displaced stepping.
   Instructions whose base is not the PC are copied unmodified;
   otherwise the Rn field is redirected to r0 and install_preload
   arranges for r0 to hold the (pipeline-adjusted) PC value.  */

static int
arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
		  arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);

  if (!insn_references_pc (insn, 0x000f0000ul))
    return arm_copy_unmodified (gdbarch, insn, "preload", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
			(unsigned long) insn);

  /* Zero the Rn field (bits 16-19) so r0 becomes the base.  */
  dsc->modinsn[0] = insn & 0xfff0ffff;

  install_preload (gdbarch, regs, dsc, rn);

  return 0;
}
4716
/* Copy a 32-bit Thumb {pli,pld} with immediate offset for displaced
   stepping.  Only the PC-relative (literal) forms need rewriting;
   they become a register-offset preload using r0 (PC value) and r1
   (signed offset).  */

static int
thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
		     struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int u_bit = bit (insn1, 7);	/* Add (1) or subtract (0) imm12.  */
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);

  /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
     PLD (literal) Encoding T1.  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
			imm12);

  /* Fold the U (add/subtract) bit into the offset's sign.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction {pli/pld} PC imm12 into:
     Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12

     {pli/pld} [r0, r1]

     Cleanup: r0 <- tmp[0], r1 <- tmp[1].  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);

  /* Reading the PC yields the pipeline-adjusted value.  */
  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
  /* Register-offset form: cleanup must restore both r0 and r1.  */
  dsc->u.preload.immed = 0;

  /* {pli/pld} [r0, r1] */
  dsc->modinsn[0] = insn1 & 0xfff0;
  dsc->modinsn[1] = 0xf001;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_preload;
  return 0;
}
4764
4765 /* Preload instructions with register offset. */
4766
4767 static void
4768 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4769 arm_displaced_step_closure *dsc, unsigned int rn,
4770 unsigned int rm)
4771 {
4772 ULONGEST rn_val, rm_val;
4773
4774 /* Preload register-offset instructions:
4775
4776 {pli/pld} [rn, rm {, shift}]
4777 ->
4778 {pli/pld} [r0, r1 {, shift}]. */
4779
4780 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4781 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4782 rn_val = displaced_read_reg (regs, dsc, rn);
4783 rm_val = displaced_read_reg (regs, dsc, rm);
4784 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4785 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4786 dsc->u.preload.immed = 0;
4787
4788 dsc->cleanup = &cleanup_preload;
4789 }
4790
/* Copy an ARM {pli,pld} with register offset for displaced stepping.
   Instructions referencing the PC in either the base (bits 16-19) or
   offset (bits 0-3) field are rewritten to use r0/r1 instead.  */

static int
arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
		      struct regcache *regs,
		      arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);

  if (!insn_references_pc (insn, 0x000f000ful))
    return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
			(unsigned long) insn);

  /* Zero Rn (bits 16-19) and set Rm (bits 0-3) to 1: the copied
     instruction uses r0 as base and r1 as offset.  */
  dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;

  install_preload_reg (gdbarch, regs, dsc, rn, rm);
  return 0;
}
4812
/* Copy/cleanup coprocessor load and store instructions.  */

/* Cleanup: restore r0, and if the original instruction had writeback,
   copy the updated base value (which the copied instruction left in
   r0) back into the real base register.  */

static void
cleanup_copro_load_store (struct gdbarch *gdbarch,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  /* r0 stood in for the base register, so it now holds the possibly
     written-back base value.  */
  ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);

  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);

  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
}
4827
/* Set up state for a displaced coprocessor load/store: move base
   register RN's value (word-aligned) into r0, saving the old r0 in
   tmp[0].  WRITEBACK records whether cleanup must copy the updated
   base back into RN.  */

static void
install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
			  arm_displaced_step_closure *dsc,
			  int writeback, unsigned int rn)
{
  ULONGEST rn_val;

  /* Coprocessor load/store instructions:

     {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
     ->
     {stc/stc2} [r0, #+/-imm].

     ldc/ldc2 are handled identically.  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  rn_val = displaced_read_reg (regs, dsc, rn);
  /* PC should be 4-byte aligned.  */
  rn_val = rn_val & 0xfffffffc;
  displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);

  /* Record what cleanup_copro_load_store must undo.  */
  dsc->u.ldst.writeback = writeback;
  dsc->u.ldst.rn = rn;

  dsc->cleanup = &cleanup_copro_load_store;
}
4854
/* Copy an ARM coprocessor load/store for displaced stepping.  Only
   PC-based forms need rewriting; the Rn field is redirected to r0.  */

static int
arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
			   struct regcache *regs,
			   arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);

  if (!insn_references_pc (insn, 0x000f0000ul))
    return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
			"load/store insn %.8lx\n", (unsigned long) insn);

  /* Zero the Rn field (bits 16-19) so r0 becomes the base.  */
  dsc->modinsn[0] = insn & 0xfff0ffff;

  /* NOTE(review): bit 25 is passed as the writeback flag here; the
     LDC/STC W bit is commonly documented as bit 21 — confirm against
     the dispatcher that routes into this function.  */
  install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);

  return 0;
}
4875
/* Copy a 32-bit Thumb coprocessor load/store for displaced stepping.
   Only PC-based (literal) forms need rewriting; the Rn field in the
   first halfword is redirected to r0.  */

static int
thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn1, 0, 3);

  if (rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					"copro load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
			"load/store insn %.4x%.4x\n", insn1, insn2);

  /* Zero the Rn field (bits 0-3 of the first halfword) so r0 becomes
     the base.  */
  dsc->modinsn[0] = insn1 & 0xfff0;
  dsc->modinsn[1] = insn2;
  dsc->numinsns = 2;

  /* This function is called for copying instruction LDC/LDC2/VLDR, which
     doesn't support writeback, so pass 0.  */
  install_copro_load_store (gdbarch, regs, dsc, 0, rn);

  return 0;
}
4901
/* Clean up branch instructions (actually perform the branch, by setting
   PC).  */

static void
cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
		arm_displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  /* Evaluate the branch condition against the current flags, since
     the instruction copied to the scratch area was a NOP.  */
  int branch_taken = condition_true (dsc->u.branch.cond, status);
  /* Exchanging branches (BX/BLX) may switch instruction set.  */
  enum pc_write_style write_pc = dsc->u.branch.exchange
				 ? BX_WRITE_PC : BRANCH_WRITE_PC;

  if (!branch_taken)
    return;

  if (dsc->u.branch.link)
    {
      /* The value of LR should be the next insn of current one.  In order
	 not to confuse logic handling later insn `bx lr', if current insn mode
	 is Thumb, the bit 0 of LR value should be set to 1.  */
      ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;

      if (dsc->is_thumb)
	next_insn_addr |= 0x1;

      displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
			   CANNOT_WRITE_PC);
    }

  displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
}
4933
/* Copy B/BL/BLX instructions with immediate destinations.  */

/* Record everything cleanup_branch needs to emulate a branch with
   condition COND to OFFSET bytes past the pipeline-adjusted PC.
   LINK selects BL/BLX (LR is written); EXCHANGE selects BLX (may
   switch instruction set).  */

static void
install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
		  arm_displaced_step_closure *dsc,
		  unsigned int cond, int exchange, int link, long offset)
{
  /* Implement "BL<cond> <label>" as:

     Preparation: cond <- instruction condition
     Insn: mov r0, r0 (nop)
     Cleanup: if (condition true) { r14 <- pc; pc <- label }.

     B<cond> similar, but don't set r14 in cleanup.  */

  dsc->u.branch.cond = cond;
  dsc->u.branch.link = link;
  dsc->u.branch.exchange = exchange;

  dsc->u.branch.dest = dsc->insn_addr;
  if (link && exchange)
    /* For BLX, offset is computed from the Align (PC, 4).  */
    dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;

  /* Branch offsets are relative to the pipeline-adjusted PC: insn
     address + 4 in Thumb state, + 8 in ARM state.  */
  if (dsc->is_thumb)
    dsc->u.branch.dest += 4 + offset;
  else
    dsc->u.branch.dest += 8 + offset;

  dsc->cleanup = &cleanup_branch;
}
/* Copy an ARM-state B, BL or BLX (immediate) for displaced stepping.
   A NOP is executed in the scratch space; cleanup_branch performs the
   actual control transfer.  */
static int
arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
		   struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int cond = bits (insn, 28, 31);
  /* A 0xf condition field means this is BLX (immediate).  */
  int exchange = (cond == 0xf);
  /* BLX always links; otherwise bit 24 distinguishes BL from B.  */
  int link = exchange || bit (insn, 24);
  long offset;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
			"%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
			(unsigned long) insn);
  if (exchange)
    /* For BLX, set bit 0 of the destination.  The cleanup_branch function will
       then arrange the switch into Thumb mode.  */
    offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
  else
    offset = bits (insn, 0, 23) << 2;

  /* Sign-extend the 26-bit offset (bit 25 is its sign bit).  */
  if (bit (offset, 25))
    offset = offset | ~0x3ffffff;

  dsc->modinsn[0] = ARM_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
4993
/* Copy a 32-bit Thumb B, BL or BLX for displaced stepping.  The
   branch offset is reassembled from the scattered imm10/imm11 and
   I1/I2/S bits of the two halfwords; a NOP is executed in the
   scratch space and cleanup_branch performs the transfer.  */

static int
thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
		      uint16_t insn2, struct regcache *regs,
		      arm_displaced_step_closure *dsc)
{
  int link = bit (insn2, 14);
  int exchange = link && !bit (insn2, 12);
  int cond = INST_AL;
  long offset = 0;
  int j1 = bit (insn2, 13);
  int j2 = bit (insn2, 11);
  /* NOTE(review): sbits yields -1 or 0 here, and the (s << 24) below
     left-shifts a negative int — technically undefined behavior in
     ISO C, relied on to sign-extend the offset.  */
  int s = sbits (insn1, 10, 10);
  int i1 = !(j1 ^ bit (insn1, 10));
  int i2 = !(j2 ^ bit (insn1, 10));

  if (!link && !exchange) /* B */
    {
      offset = (bits (insn2, 0, 10) << 1);
      if (bit (insn2, 12)) /* Encoding T4 */
	{
	  offset |= (bits (insn1, 0, 9) << 12)
	    | (i2 << 22)
	    | (i1 << 23)
	    | (s << 24);
	  cond = INST_AL;
	}
      else /* Encoding T3 */
	{
	  offset |= (bits (insn1, 0, 5) << 12)
	    | (j1 << 18)
	    | (j2 << 19)
	    | (s << 20);
	  cond = bits (insn1, 6, 9);
	}
    }
  else
    {
      /* BL / BLX: 10-bit upper immediate plus I1/I2/S.  For BLX the
	 low halfword holds imm10H shifted by 2; for BL, imm11
	 shifted by 1.  */
      offset = (bits (insn1, 0, 9) << 12);
      offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
      offset |= exchange ?
	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
			"%.4x %.4x with offset %.8lx\n",
			link ? (exchange) ? "blx" : "bl" : "b",
			insn1, insn2, offset);

  dsc->modinsn[0] = THUMB_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
5048
/* Copy B Thumb instructions.  Handles 16-bit encodings T1
   (conditional, imm8) and T2 (unconditional, imm11); a NOP runs in
   the scratch space and cleanup_branch performs the jump.  */
static int
thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
	      arm_displaced_step_closure *dsc)
{
  unsigned int cond = 0;
  int offset = 0;
  unsigned short bit_12_15 = bits (insn, 12, 15);
  CORE_ADDR from = dsc->insn_addr;

  if (bit_12_15 == 0xd)		/* Encoding T1: conditional branch.  */
    {
      /* offset = SignExtend (imm8:0, 32) */
      offset = sbits ((insn << 1), 0, 8);
      cond = bits (insn, 8, 11);
    }
  else if (bit_12_15 == 0xe) /* Encoding T2 */
    {
      /* Unconditional branch, 11-bit immediate.  */
      offset = sbits ((insn << 1), 0, 11);
      cond = INST_AL;
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying b immediate insn %.4x "
			"with offset %d\n", insn, offset);

  /* Destination is relative to the pipeline-adjusted Thumb PC
     (insn address + 4).  */
  dsc->u.branch.cond = cond;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  dsc->u.branch.dest = from + 4 + offset;

  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
5087
/* Copy BX/BLX with register-specified destinations.  */

/* Record everything cleanup_branch needs to emulate a BX/BLX<cond>
   to the address held in register RM; LINK selects BLX (write LR).  */

static void
install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
		    arm_displaced_step_closure *dsc, int link,
		    unsigned int cond, unsigned int rm)
{
  /* Implement {BX,BLX}<cond> <reg>" as:

     Preparation: cond <- instruction condition
     Insn: mov r0, r0 (nop)
     Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.

     Don't set r14 in cleanup for BX.  */

  /* The destination (including its state-selecting bit 0) is read
     from RM at copy time.  */
  dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);

  dsc->u.branch.cond = cond;
  dsc->u.branch.link = link;

  /* BX semantics: cleanup_branch uses BX_WRITE_PC, which may switch
     instruction set.  */
  dsc->u.branch.exchange = 1;

  dsc->cleanup = &cleanup_branch;
}
5112
5113 static int
5114 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5115 struct regcache *regs, arm_displaced_step_closure *dsc)
5116 {
5117 unsigned int cond = bits (insn, 28, 31);
5118 /* BX: x12xxx1x
5119 BLX: x12xxx3x. */
5120 int link = bit (insn, 5);
5121 unsigned int rm = bits (insn, 0, 3);
5122
5123 if (debug_displaced)
5124 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5125 (unsigned long) insn);
5126
5127 dsc->modinsn[0] = ARM_NOP;
5128
5129 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5130 return 0;
5131 }
5132
5133 static int
5134 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5135 struct regcache *regs,
5136 arm_displaced_step_closure *dsc)
5137 {
5138 int link = bit (insn, 7);
5139 unsigned int rm = bits (insn, 3, 6);
5140
5141 if (debug_displaced)
5142 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5143 (unsigned short) insn);
5144
5145 dsc->modinsn[0] = THUMB_NOP;
5146
5147 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5148
5149 return 0;
5150 }
5151
5152
5153 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5154
5155 static void
5156 cleanup_alu_imm (struct gdbarch *gdbarch,
5157 struct regcache *regs, arm_displaced_step_closure *dsc)
5158 {
5159 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5160 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5161 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5162 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5163 }
5164
/* Copy an ARM data-processing instruction with an immediate RHS for
   displaced stepping.  Instructions that do not reference the PC run
   unmodified; otherwise Rd/Rn are rewritten onto scratch registers
   r0/r1 and cleanup_alu_imm undoes the substitution afterwards.  */

static int
arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
                  arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rd = bits (insn, 12, 15);
  unsigned int op = bits (insn, 21, 24);
  /* Opcode 0xd is MOV, which has no Rn operand.  */
  int is_mov = (op == 0xd);
  ULONGEST rd_val, rn_val;

  /* If neither Rd nor Rn is the PC, the insn can run as-is.  */
  if (!insn_references_pc (insn, 0x000ff000ul))
    return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
                        "%.8lx\n", is_mov ? "move" : "ALU",
                        (unsigned long) insn);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
                  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  /* Save the scratch registers, then load the operand values into
     them (reads must precede the writes in case rd/rn are r0/r1).  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Clear the Rd field; for non-MOV opcodes also set Rn to r1.  */
  if (is_mov)
    dsc->modinsn[0] = insn & 0xfff00fff;
  else
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
5212
/* Copy a Thumb-2 32-bit ALU-immediate instruction for displaced
   stepping.  Only the MOV form reaches here (asserted below); when the
   PC is involved, the operands are rewritten onto scratch registers
   r0/r1 and cleanup_alu_imm reverses the substitution.  */

static int
thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
                     uint16_t insn2, struct regcache *regs,
                     arm_displaced_step_closure *dsc)
{
  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn, rm, rd;
  ULONGEST rd_val, rn_val;

  rn = bits (insn1, 0, 3); /* Rn */
  rm = bits (insn2, 0, 3); /* Rm */
  rd = bits (insn2, 8, 11); /* Rd */

  /* This routine is only called for instruction MOV.  */
  gdb_assert (op == 0x2 && rn == 0xf);

  /* Nothing to fix up unless the PC is referenced.  */
  if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
                        "ALU", insn1, insn2);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
                  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  /* Save the scratch registers and load the operands into them.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Retarget the second halfword's Rd/Rm fields at r0/r1.  */
  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
5264
5265 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5266
5267 static void
5268 cleanup_alu_reg (struct gdbarch *gdbarch,
5269 struct regcache *regs, arm_displaced_step_closure *dsc)
5270 {
5271 ULONGEST rd_val;
5272 int i;
5273
5274 rd_val = displaced_read_reg (regs, dsc, 0);
5275
5276 for (i = 0; i < 3; i++)
5277 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5278
5279 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5280 }
5281
5282 static void
5283 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5284 arm_displaced_step_closure *dsc,
5285 unsigned int rd, unsigned int rn, unsigned int rm)
5286 {
5287 ULONGEST rd_val, rn_val, rm_val;
5288
5289 /* Instruction is of form:
5290
5291 <op><cond> rd, [rn,] rm [, <shift>]
5292
5293 Rewrite as:
5294
5295 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5296 r0, r1, r2 <- rd, rn, rm
5297 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5298 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5299 */
5300
5301 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5302 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5303 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5304 rd_val = displaced_read_reg (regs, dsc, rd);
5305 rn_val = displaced_read_reg (regs, dsc, rn);
5306 rm_val = displaced_read_reg (regs, dsc, rm);
5307 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5308 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5309 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5310 dsc->rd = rd;
5311
5312 dsc->cleanup = &cleanup_alu_reg;
5313 }
5314
5315 static int
5316 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5317 arm_displaced_step_closure *dsc)
5318 {
5319 unsigned int op = bits (insn, 21, 24);
5320 int is_mov = (op == 0xd);
5321
5322 if (!insn_references_pc (insn, 0x000ff00ful))
5323 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5324
5325 if (debug_displaced)
5326 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5327 is_mov ? "move" : "ALU", (unsigned long) insn);
5328
5329 if (is_mov)
5330 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5331 else
5332 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5333
5334 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5335 bits (insn, 0, 3));
5336 return 0;
5337 }
5338
5339 static int
5340 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5341 struct regcache *regs,
5342 arm_displaced_step_closure *dsc)
5343 {
5344 unsigned rm, rd;
5345
5346 rm = bits (insn, 3, 6);
5347 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5348
5349 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5350 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5351
5352 if (debug_displaced)
5353 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5354 (unsigned short) insn);
5355
5356 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5357
5358 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5359
5360 return 0;
5361 }
5362
5363 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5364
5365 static void
5366 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5367 struct regcache *regs,
5368 arm_displaced_step_closure *dsc)
5369 {
5370 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5371 int i;
5372
5373 for (i = 0; i < 4; i++)
5374 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5375
5376 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5377 }
5378
/* Prepare to single-step an ALU instruction whose RHS is a register
   shifted by a register: save r0-r3, load the operands Rd/Rn/Rm/Rs
   into them, and arrange for cleanup_alu_shifted_reg to restore
   everything and install the result afterwards.  */

static void
install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
                         arm_displaced_step_closure *dsc,
                         unsigned int rd, unsigned int rn, unsigned int rm,
                         unsigned rs)
{
  int i;
  ULONGEST rd_val, rn_val, rm_val, rs_val;

  /* Instruction is of form:

     <op><cond> rd, [rn,] rm, <shift> rs

     Rewrite as:

     Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
                  r0, r1, r2, r3 <- rd, rn, rm, rs
     Insn: <op><cond> r0, r1, r2, <shift> r3
     Cleanup: tmp5 <- r0
              r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
              rd <- tmp5
  */

  /* Save the scratch registers first.  */
  for (i = 0; i < 4; i++)
    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

  /* Read all operands before writing any scratch register, in case
     rd/rn/rm/rs overlap r0-r3.  */
  rd_val = displaced_read_reg (regs, dsc, rd);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rm_val = displaced_read_reg (regs, dsc, rm);
  rs_val = displaced_read_reg (regs, dsc, rs);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
  dsc->rd = rd;
  dsc->cleanup = &cleanup_alu_shifted_reg;
}
5416
5417 static int
5418 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5419 struct regcache *regs,
5420 arm_displaced_step_closure *dsc)
5421 {
5422 unsigned int op = bits (insn, 21, 24);
5423 int is_mov = (op == 0xd);
5424 unsigned int rd, rn, rm, rs;
5425
5426 if (!insn_references_pc (insn, 0x000fff0ful))
5427 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5428
5429 if (debug_displaced)
5430 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5431 "%.8lx\n", is_mov ? "move" : "ALU",
5432 (unsigned long) insn);
5433
5434 rn = bits (insn, 16, 19);
5435 rm = bits (insn, 0, 3);
5436 rs = bits (insn, 8, 11);
5437 rd = bits (insn, 12, 15);
5438
5439 if (is_mov)
5440 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5441 else
5442 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5443
5444 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5445
5446 return 0;
5447 }
5448
/* Clean up load instructions: restore the scratch registers used by
   the rewritten copy, perform any base-register writeback, and move
   the loaded value(s) into the real destination register(s).  */

static void
cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
              arm_displaced_step_closure *dsc)
{
  ULONGEST rt_val, rt_val2 = 0, rn_val;

  /* The copy left the loaded data in r0 (and r1 for a doubleword
     transfer), and the possibly-updated base register in r2.  */
  rt_val = displaced_read_reg (regs, dsc, 0);
  if (dsc->u.ldst.xfersize == 8)
    rt_val2 = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, 2);

  /* Restore every scratch register that was saved for this transfer
     (r3 only for register-offset forms).  */
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);

  /* Handle register writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
  /* Put result in right place.  */
  displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
  if (dsc->u.ldst.xfersize == 8)
    displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
}
5477
5478 /* Clean up store instructions. */
5479
5480 static void
5481 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5482 arm_displaced_step_closure *dsc)
5483 {
5484 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5485
5486 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5487 if (dsc->u.ldst.xfersize > 4)
5488 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5489 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5490 if (!dsc->u.ldst.immed)
5491 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5492 if (!dsc->u.ldst.restore_r4)
5493 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5494
5495 /* Writeback. */
5496 if (dsc->u.ldst.writeback)
5497 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5498 }
5499
5500 /* Copy "extra" load/store instructions. These are halfword/doubleword
5501 transfers, which have a different encoding to byte/word transfers. */
5502
5503 static int
5504 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5505 struct regcache *regs, arm_displaced_step_closure *dsc)
5506 {
5507 unsigned int op1 = bits (insn, 20, 24);
5508 unsigned int op2 = bits (insn, 5, 6);
5509 unsigned int rt = bits (insn, 12, 15);
5510 unsigned int rn = bits (insn, 16, 19);
5511 unsigned int rm = bits (insn, 0, 3);
5512 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5513 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5514 int immed = (op1 & 0x4) != 0;
5515 int opcode;
5516 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5517
5518 if (!insn_references_pc (insn, 0x000ff00ful))
5519 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5520
5521 if (debug_displaced)
5522 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5523 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5524 (unsigned long) insn);
5525
5526 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5527
5528 if (opcode < 0)
5529 internal_error (__FILE__, __LINE__,
5530 _("copy_extra_ld_st: instruction decode error"));
5531
5532 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5533 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5534 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5535 if (!immed)
5536 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5537
5538 rt_val = displaced_read_reg (regs, dsc, rt);
5539 if (bytesize[opcode] == 8)
5540 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5541 rn_val = displaced_read_reg (regs, dsc, rn);
5542 if (!immed)
5543 rm_val = displaced_read_reg (regs, dsc, rm);
5544
5545 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5546 if (bytesize[opcode] == 8)
5547 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5548 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5549 if (!immed)
5550 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5551
5552 dsc->rd = rt;
5553 dsc->u.ldst.xfersize = bytesize[opcode];
5554 dsc->u.ldst.rn = rn;
5555 dsc->u.ldst.immed = immed;
5556 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5557 dsc->u.ldst.restore_r4 = 0;
5558
5559 if (immed)
5560 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5561 ->
5562 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5563 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5564 else
5565 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5566 ->
5567 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5568 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5569
5570 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5571
5572 return 0;
5573 }
5574
/* Copy byte/half word/word loads and stores.  */

/* Shared preparation for displaced-stepping a single-register load or
   store: save the scratch registers (r0/r2, plus r3 for register-offset
   forms and r4 for stores), move the operands RT/RN/RM into r0/r2/r3,
   and record the transfer parameters for cleanup_load/cleanup_store.  */

static void
install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
                    arm_displaced_step_closure *dsc, int load,
                    int immed, int writeback, int size, int usermode,
                    int rt, int rm, int rn)
{
  ULONGEST rt_val, rn_val, rm_val = 0;

  /* Save the registers the rewritten instruction may clobber.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  if (!load)
    dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);

  /* Read operand values before overwriting the scratch registers.  */
  rt_val = displaced_read_reg (regs, dsc, rt);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rt;
  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = writeback;

  /* To write PC we can do:

     Before this sequence of instructions:
     r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
     r2 is the Rn value got from dispalced_read_reg.

     Insn1: push {pc} Write address of STR instruction + offset on stack
     Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
     Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
                           = addr(Insn1) + offset - addr(Insn3) - 8
                           = offset - 16
     Insn4: add r4, r4, #8 r4 = offset - 8
     Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
                           = from + offset
     Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])

     Otherwise we don't know what value to write for PC, since the offset is
     architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
     of this can be found in Section "Saving from r15" in
     http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;
}
5630
5631
/* Copy a Thumb-2 PC-relative (literal) load of SIZE bytes for
   displaced stepping.  The PC-relative form is rewritten into a
   register-offset LDR using r2 (the adjusted PC) and r3 (the signed
   immediate); cleanup_load moves the result to RT and restores the
   scratch registers.  */

static int
thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
                          uint16_t insn2, struct regcache *regs,
                          arm_displaced_step_closure *dsc, int size)
{
  /* U bit: whether the 12-bit offset is added or subtracted.  */
  unsigned int u_bit = bit (insn1, 7);
  unsigned int rt = bits (insn2, 12, 15);
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
                        "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
                        (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
                        imm12);

  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction LDR Rt imm12 into:

     Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12

     LDR R0, R2, R3,

     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2].  */


  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  /* Literal loads use Align(PC, 4) as the base.  */
  pc_val = pc_val & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);

  dsc->rd = rt;

  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  /* LDR R0, R2, R3 */
  dsc->modinsn[0] = 0xf852;
  dsc->modinsn[1] = 0x3;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_load;

  return 0;
}
5687
/* Copy a Thumb-2 load (immediate or register offset, per IMMED) for
   displaced stepping.  Forms not involving the PC run unmodified;
   otherwise Rt/Rn are retargeted at scratch registers r0/r2 (and r3
   for the register-offset form) via install_load_store.  */

static int
thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
                          uint16_t insn2, struct regcache *regs,
                          arm_displaced_step_closure *dsc,
                          int writeback, int immed)
{
  unsigned int rt = bits (insn2, 12, 15);
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */
  /* In LDR (register), there is also a register Rm, which is not allowed to
     be PC, so we don't have to check it.  */

  if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
                                        dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
                        "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
                        rt, rn, insn1, insn2);

  install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
                      0, rt, rm, rn);

  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* ldr[b]<cond> rt, [rn, #imm], etc.
       ->
       ldr[b]<cond> r0, [r2, #imm].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = insn2 & 0x0fff;
    }
  else
    /* ldr[b]<cond> rt, [rn, rm], etc.
       ->
       ldr[b]<cond> r0, [r2, r3].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
    }

  dsc->numinsns = 2;

  return 0;
}
5735
5736
/* Copy an ARM single-register load or store (LOAD selects ldr vs str,
   SIZE is the transfer width, USERMODE the "t" unprivileged form) for
   displaced stepping.  PC-referencing forms are retargeted at scratch
   registers; storing the PC additionally needs the r4-based sequence
   described in the comment inside install_load_store.  */

static int
arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
                            struct regcache *regs,
                            arm_displaced_step_closure *dsc,
                            int load, int size, int usermode)
{
  int immed = !bit (insn, 25);
  /* Writeback happens for post-indexed (P == 0) or W == 1 forms.  */
  int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */

  /* No PC involved: run the instruction unmodified.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
                        "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
                        load ? (size == 1 ? "ldrb" : "ldr")
                             : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
                        rt, rn,
                        (unsigned long) insn);

  install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
                      usermode, rt, rm, rn);

  if (load || rt != ARM_PC_REGNUM)
    {
      dsc->u.ldst.restore_r4 = 0;

      if (immed)
        /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
           ->
           {ldr,str}[b]<cond> r0, [r2, #imm].  */
        dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
      else
        /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
           ->
           {ldr,str}[b]<cond> r0, [r2, r3].  */
        dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
    }
  else
    {
      /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
      dsc->u.ldst.restore_r4 = 1;
      dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
      dsc->modinsn[1] = 0xe8bd0010;  /* pop  {r4} */
      dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
      dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
      dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */

      /* As above.  */
      if (immed)
        dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
      else
        dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;

      dsc->numinsns = 6;
    }

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;

  return 0;
}
5801
/* Cleanup LDM instructions with fully-populated register list.  This is an
   unfortunate corner case: it's impossible to implement correctly by modifying
   the instruction.  The issue is as follows: we have an instruction,

   ldm rN, {r0-r15}

   which we must rewrite to avoid loading PC.  A possible solution would be to
   do the load in two halves, something like (with suitable cleanup
   afterwards):

   mov r8, rN
   ldm[id][ab] r8!, {r0-r7}
   str r7, <temp>
   ldm[id][ab] r8, {r7-r14}
   <bkpt>

   but at present there's no suitable place for <temp>, since the scratch space
   is overwritten before the cleanup routine is called.  For now, we simply
   emulate the instruction.  */

static void
cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
                        arm_displaced_step_closure *dsc)
{
  int inc = dsc->u.block.increment;
  /* Per-word address adjustments for the before/after addressing
     modes (IA/IB/DA/DB).  */
  int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
  int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
  uint32_t regmask = dsc->u.block.regmask;
  /* Incrementing forms walk r0 upwards; decrementing forms walk r15
     downwards.  */
  int regno = inc ? 0 : 15;
  CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
  /* ldm rn, {...pc}^ with the S bit set is an exception return.  */
  int exception_return = dsc->u.block.load && dsc->u.block.user
                         && (regmask & 0x8000) != 0;
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int do_transfer = condition_true (dsc->u.block.cond, status);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (!do_transfer)
    return;

  /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
     sensible we can do here.  Complain loudly.  */
  if (exception_return)
    error (_("Cannot single-step exception return"));

  /* We don't handle any stores here for now.  */
  gdb_assert (dsc->u.block.load != 0);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
                        "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
                        dsc->u.block.increment ? "inc" : "dec",
                        dsc->u.block.before ? "before" : "after");

  /* Emulate the load one register at a time, in transfer order.  */
  while (regmask)
    {
      uint32_t memword;

      /* Advance to the next register still present in the mask.  */
      if (inc)
        while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
          regno++;
      else
        while (regno >= 0 && (regmask & (1 << regno)) == 0)
          regno--;

      xfer_addr += bump_before;

      memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
      displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);

      xfer_addr += bump_after;

      regmask &= ~(1 << regno);
    }

  /* Emulate base-register writeback.  */
  if (dsc->u.block.writeback)
    displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
                         CANNOT_WRITE_PC);
}
5880
5881 /* Clean up an STM which included the PC in the register list. */
5882
5883 static void
5884 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5885 arm_displaced_step_closure *dsc)
5886 {
5887 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5888 int store_executed = condition_true (dsc->u.block.cond, status);
5889 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5890 CORE_ADDR stm_insn_addr;
5891 uint32_t pc_val;
5892 long offset;
5893 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5894
5895 /* If condition code fails, there's nothing else to do. */
5896 if (!store_executed)
5897 return;
5898
5899 if (dsc->u.block.increment)
5900 {
5901 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5902
5903 if (dsc->u.block.before)
5904 pc_stored_at += 4;
5905 }
5906 else
5907 {
5908 pc_stored_at = dsc->u.block.xfer_addr;
5909
5910 if (dsc->u.block.before)
5911 pc_stored_at -= 4;
5912 }
5913
5914 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5915 stm_insn_addr = dsc->scratch_base;
5916 offset = pc_val - stm_insn_addr;
5917
5918 if (debug_displaced)
5919 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5920 "STM instruction\n", offset);
5921
5922 /* Rewrite the stored PC to the proper value for the non-displaced original
5923 instruction. */
5924 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5925 dsc->insn_addr + offset);
5926 }
5927
/* Clean up an LDM which includes the PC in the register list.  We clumped all
   the registers in the transferred list into a contiguous range r0...rX (to
   avoid loading PC directly and losing control of the debugged program), so we
   must undo that here.  */

static void
cleanup_block_load_pc (struct gdbarch *gdbarch,
                       struct regcache *regs,
                       arm_displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int load_executed = condition_true (dsc->u.block.cond, status);
  unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
  unsigned int regs_loaded = bitcount (mask);
  unsigned int num_to_shuffle = regs_loaded, clobbered;

  /* The method employed here will fail if the register list is fully populated
     (we need to avoid loading PC directly).  */
  gdb_assert (num_to_shuffle < 16);

  if (!load_executed)
    return;

  /* Bitmask of the contiguous scratch registers r0..r(N-1) the copy
     loaded into; bits are cleared as each value reaches its home.  */
  clobbered = (1 << num_to_shuffle) - 1;

  /* Walk the original register list from r15 downwards, moving the
     highest remaining scratch value into each listed register.  */
  while (num_to_shuffle > 0)
    {
      if ((mask & (1 << write_reg)) != 0)
        {
          unsigned int read_reg = num_to_shuffle - 1;

          if (read_reg != write_reg)
            {
              ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
              displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
              if (debug_displaced)
                fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
                                    "loaded register r%d to r%d\n"), read_reg,
                                    write_reg);
            }
          else if (debug_displaced)
            fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
                                "r%d already in the right place\n"),
                                write_reg);

          clobbered &= ~(1 << write_reg);

          num_to_shuffle--;
        }

      write_reg--;
    }

  /* Restore any registers we scribbled over.  */
  for (write_reg = 0; clobbered != 0; write_reg++)
    {
      if ((clobbered & (1 << write_reg)) != 0)
        {
          displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
                               CANNOT_WRITE_PC);
          if (debug_displaced)
            fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
                                "clobbered register r%d\n"), write_reg);
          clobbered &= ~(1 << write_reg);
        }
    }

  /* Perform register writeback manually.  */
  if (dsc->u.block.writeback)
    {
      ULONGEST new_rn_val = dsc->u.block.xfer_addr;

      if (dsc->u.block.increment)
        new_rn_val += regs_loaded * 4;
      else
        new_rn_val -= regs_loaded * 4;

      displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
                           CANNOT_WRITE_PC);
    }
}
6009
/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
   in user-level code (in particular exception return, ldm rn, {...pc}^).  */

static int
arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
                     struct regcache *regs,
                     arm_displaced_step_closure *dsc)
{
  int load = bit (insn, 20);
  int user = bit (insn, 22);
  int increment = bit (insn, 23);
  int before = bit (insn, 24);
  int writeback = bit (insn, 21);
  int rn = bits (insn, 16, 19);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
    return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);

  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
                 "base register r15"));
      return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
                        "%.8lx\n", (unsigned long) insn);

  /* Record the transfer parameters for the cleanup routines.  */
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = user;
  dsc->u.block.increment = increment;
  dsc->u.block.before = before;
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = bits (insn, 28, 31);

  dsc->u.block.regmask = insn & 0xffff;

  if (load)
    {
      if ((insn & 0xffff) == 0xffff)
        {
          /* LDM with a fully-populated register list.  This case is
             particularly tricky.  Implement for now by fully emulating the
             instruction (which might not behave perfectly in all cases, but
             these instructions should be rare enough for that not to matter
             too much).  */
          dsc->modinsn[0] = ARM_NOP;

          dsc->cleanup = &cleanup_block_load_all;
        }
      else
        {
          /* LDM of a list of registers which includes PC.  Implement by
             rewriting the list of registers to be transferred into a
             contiguous chunk r0...rX before doing the transfer, then shuffling
             registers into the correct places in the cleanup routine.  */
          unsigned int regmask = insn & 0xffff;
          unsigned int num_in_list = bitcount (regmask), new_regmask;
          unsigned int i;

          /* Save the scratch registers the modified list will load
             into.  */
          for (i = 0; i < num_in_list; i++)
            dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

          /* Writeback makes things complicated.  We need to avoid clobbering
             the base register with one of the registers in our modified
             register list, but just using a different register can't work in
             all cases, e.g.:

             ldm r14!, {r0-r13,pc}

             which would need to be rewritten as:

             ldm rN!, {r0-r14}

             but that can't work, because there's no free register for N.

             Solve this by turning off the writeback bit, and emulating
             writeback manually in the cleanup routine.  */

          if (writeback)
            insn &= ~(1 << 21);

          new_regmask = (1 << num_in_list) - 1;

          if (debug_displaced)
            fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
                                "{..., pc}: original reg list %.4x, modified "
                                "list %.4x\n"), rn, writeback ? "!" : "",
                                (int) insn & 0xffff, new_regmask);

          dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);

          dsc->cleanup = &cleanup_block_load_pc;
        }
    }
  else
    {
      /* STM of a list of registers which includes PC.  Run the instruction
         as-is, but out of line: this will store the wrong value for the PC,
         so we must manually fix up the memory in the cleanup routine.
         Doing things this way has the advantage that we can auto-detect
         the offset of the PC write (which is architecture-dependent) in
         the cleanup routine.  */
      dsc->modinsn[0] = insn;

      dsc->cleanup = &cleanup_block_store_pc;
    }

  return 0;
}
6126
/* Copy a 32-bit Thumb LDM/STM (block transfer) instruction for displaced
   stepping.  Transfers that do not involve the PC run unmodified out of
   line.  An LDM whose register list includes the PC is rewritten to load
   into the contiguous run r0..rN-1 instead, with cleanup_block_load_pc
   shuffling the loaded values into their proper registers afterwards.
   An STM that stores the PC runs as-is, and cleanup_block_store_pc
   patches the stored PC value in memory.  Returns 0 on success.  */

static int
thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
			struct regcache *regs,
			arm_displaced_step_closure *dsc)
{
  int rn = bits (insn1, 0, 3);	/* Base register.  */
  int load = bit (insn1, 4);	/* 1 = LDM, 0 = STM.  */
  int writeback = bit (insn1, 5);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);

  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.4x%.4x\n", insn1, insn2);

  /* Clear bit 13, since it should be always zero.  */
  dsc->u.block.regmask = (insn2 & 0xdfff);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = 0;
  dsc->u.block.increment = bit (insn1, 7);
  dsc->u.block.before = bit (insn1, 8);
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = INST_AL;
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);

  if (load)
    {
      if (dsc->u.block.regmask == 0xffff)
	{
	  /* This branch is impossible to happen.  */
	  gdb_assert (0);
	}
      else
	{
	  unsigned int regmask = dsc->u.block.regmask;
	  unsigned int num_in_list = bitcount (regmask), new_regmask;
	  unsigned int i;

	  /* Save r0..rN-1, which the rewritten register list below will
	     clobber; cleanup_block_load_pc restores them.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Turn off the writeback bit; writeback is emulated manually in
	     the cleanup routine, so the base register keeps its original
	     value while the rewritten transfer runs.  */
	  if (writeback)
	    insn1 &= ~(1 << 5);

	  /* One destination register per transferred value, starting at
	     r0.  */
	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) dsc->u.block.regmask, new_regmask);

	  dsc->modinsn[0] = insn1;
	  dsc->modinsn[1] = (new_regmask & 0xffff);
	  dsc->numinsns = 2;

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM with PC in the list: run the instruction as-is out of line;
	 the wrong PC value it stores is fixed up in memory by the cleanup
	 routine.  */
      dsc->modinsn[0] = insn1;
      dsc->modinsn[1] = insn2;
      dsc->numinsns = 2;
      dsc->cleanup = &cleanup_block_store_pc;
    }
  return 0;
}
6208
6209 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6210 This is used to avoid a dependency on BFD's bfd_endian enum. */
6211
6212 ULONGEST
6213 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6214 int byte_order)
6215 {
6216 return read_memory_unsigned_integer (memaddr, len,
6217 (enum bfd_endian) byte_order);
6218 }
6219
6220 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6221
6222 CORE_ADDR
6223 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6224 CORE_ADDR val)
6225 {
6226 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6227 }
6228
/* Wrapper over syscall_next_pc for use in get_next_pcs.  The generic ARM
   target has no knowledge of syscall successor PCs, so this always
   returns 0; OS-specific code overrides this behaviour elsewhere.  */

static CORE_ADDR
arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
{
  return 0;
}
6236
6237 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6238
6239 int
6240 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6241 {
6242 return arm_is_thumb (self->regcache);
6243 }
6244
6245 /* single_step() is called just before we want to resume the inferior,
6246 if we want to single-step it but there is no hardware or kernel
6247 single-step support. We find the target of the coming instructions
6248 and breakpoint them. */
6249
6250 std::vector<CORE_ADDR>
6251 arm_software_single_step (struct regcache *regcache)
6252 {
6253 struct gdbarch *gdbarch = regcache->arch ();
6254 struct arm_get_next_pcs next_pcs_ctx;
6255
6256 arm_get_next_pcs_ctor (&next_pcs_ctx,
6257 &arm_get_next_pcs_ops,
6258 gdbarch_byte_order (gdbarch),
6259 gdbarch_byte_order_for_code (gdbarch),
6260 0,
6261 regcache);
6262
6263 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6264
6265 for (CORE_ADDR &pc_ref : next_pcs)
6266 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6267
6268 return next_pcs;
6269 }
6270
6271 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6272 for Linux, where some SVC instructions must be treated specially. */
6273
6274 static void
6275 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6276 arm_displaced_step_closure *dsc)
6277 {
6278 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6279
6280 if (debug_displaced)
6281 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6282 "%.8lx\n", (unsigned long) resume_addr);
6283
6284 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6285 }
6286
6287
6288 /* Common copy routine for svc instruciton. */
6289
6290 static int
6291 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6292 arm_displaced_step_closure *dsc)
6293 {
6294 /* Preparation: none.
6295 Insn: unmodified svc.
6296 Cleanup: pc <- insn_addr + insn_size. */
6297
6298 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6299 instruction. */
6300 dsc->wrote_to_pc = 1;
6301
6302 /* Allow OS-specific code to override SVC handling. */
6303 if (dsc->u.svc.copy_svc_os)
6304 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6305 else
6306 {
6307 dsc->cleanup = &cleanup_svc;
6308 return 0;
6309 }
6310 }
6311
6312 static int
6313 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6314 struct regcache *regs, arm_displaced_step_closure *dsc)
6315 {
6316
6317 if (debug_displaced)
6318 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6319 (unsigned long) insn);
6320
6321 dsc->modinsn[0] = insn;
6322
6323 return install_svc (gdbarch, regs, dsc);
6324 }
6325
6326 static int
6327 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6328 struct regcache *regs, arm_displaced_step_closure *dsc)
6329 {
6330
6331 if (debug_displaced)
6332 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6333 insn);
6334
6335 dsc->modinsn[0] = insn;
6336
6337 return install_svc (gdbarch, regs, dsc);
6338 }
6339
6340 /* Copy undefined instructions. */
6341
6342 static int
6343 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6344 arm_displaced_step_closure *dsc)
6345 {
6346 if (debug_displaced)
6347 fprintf_unfiltered (gdb_stdlog,
6348 "displaced: copying undefined insn %.8lx\n",
6349 (unsigned long) insn);
6350
6351 dsc->modinsn[0] = insn;
6352
6353 return 0;
6354 }
6355
6356 static int
6357 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6358 arm_displaced_step_closure *dsc)
6359 {
6360
6361 if (debug_displaced)
6362 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6363 "%.4x %.4x\n", (unsigned short) insn1,
6364 (unsigned short) insn2);
6365
6366 dsc->modinsn[0] = insn1;
6367 dsc->modinsn[1] = insn2;
6368 dsc->numinsns = 2;
6369
6370 return 0;
6371 }
6372
6373 /* Copy unpredictable instructions. */
6374
6375 static int
6376 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6377 arm_displaced_step_closure *dsc)
6378 {
6379 if (debug_displaced)
6380 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6381 "%.8lx\n", (unsigned long) insn);
6382
6383 dsc->modinsn[0] = insn;
6384
6385 return 0;
6386 }
6387
6388 /* The decode_* functions are instruction decoding helpers. They mostly follow
6389 the presentation in the ARM ARM. */
6390
/* Decode the "miscellaneous, memory hints and advanced SIMD" space of the
   ARM unconditional instructions (bit 27 clear), dispatching on op1
   (bits 20-26) and op2 (bits 4-7).  Each instruction is copied either
   unmodified, via the preload handlers, or as undefined/unpredictable.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    /* Barrier and exclusive-monitor instructions, selected by op2.  */
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    /* Register-offset hint forms; bit 7 of op1 does not matter here.  */
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable.  */
}
6450
/* Decode the ARM unconditional (condition field 0b1111) instruction
   space for displaced stepping.  Bit 27 clear is delegated to
   arm_decode_misc_memhint_neon; the remainder dispatches on bits
   26-24 and 20 combined into a 4-bit selector.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      /* Coprocessor store forms, selected by bits 23-21.  */
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	/* Coprocessor load forms; immediate vs. literal addressing is
	   distinguished by whether Rn is the PC.  */
	int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
6533
6534 /* Decode miscellaneous instructions in dp/misc encoding space. */
6535
6536 static int
6537 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6538 struct regcache *regs,
6539 arm_displaced_step_closure *dsc)
6540 {
6541 unsigned int op2 = bits (insn, 4, 6);
6542 unsigned int op = bits (insn, 21, 22);
6543
6544 switch (op2)
6545 {
6546 case 0x0:
6547 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6548
6549 case 0x1:
6550 if (op == 0x1) /* bx. */
6551 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6552 else if (op == 0x3)
6553 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6554 else
6555 return arm_copy_undef (gdbarch, insn, dsc);
6556
6557 case 0x2:
6558 if (op == 0x1)
6559 /* Not really supported. */
6560 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6561 else
6562 return arm_copy_undef (gdbarch, insn, dsc);
6563
6564 case 0x3:
6565 if (op == 0x1)
6566 return arm_copy_bx_blx_reg (gdbarch, insn,
6567 regs, dsc); /* blx register. */
6568 else
6569 return arm_copy_undef (gdbarch, insn, dsc);
6570
6571 case 0x5:
6572 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6573
6574 case 0x7:
6575 if (op == 0x1)
6576 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6577 else if (op == 0x3)
6578 /* Not really supported. */
6579 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6580
6581 default:
6582 return arm_copy_undef (gdbarch, insn, dsc);
6583 }
6584 }
6585
/* Decode the ARM data-processing and miscellaneous instruction space.
   Bit 25 set selects the immediate forms (movw/movt/msr-imm/ALU-imm);
   bit 25 clear selects register forms, miscellaneous instructions,
   multiplies, synchronization primitives and extra load/stores,
   distinguished by op1 (bits 20-24) and op2 (bits 4-7).  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    arm_displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6631
/* Decode ARM word and unsigned-byte load/store instructions
   (LDR/STR/LDRB/STRB and their unprivileged LDRT/STRT/LDRBT/STRBT
   variants) and hand them to arm_copy_ldr_str_ldrb_strb.  A selects
   register (1) vs. immediate (0) offset; B excludes media instructions.
   NOTE(review): the three trailing arguments appear to be
   (load, byte-size, usermode) based on the pairing of opcode patterns
   below — confirm against arm_copy_ldr_str_ldrb_strb's definition.  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     arm_displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
6668
/* Decode the ARM media instruction space (parallel add/sub, packing,
   saturation, bit-field and sum-of-absolute-differences instructions),
   dispatching on bits 20-24.  None of these can use the PC as an
   operand, so everything here is copied unmodified or as undefined.  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  arm_displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  /* Rd == 0xf distinguishes usad8 from usada8.  */
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  /* Rn == 0xf distinguishes bfc from bfi.  */
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6724
6725 static int
6726 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6727 struct regcache *regs,
6728 arm_displaced_step_closure *dsc)
6729 {
6730 if (bit (insn, 25))
6731 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6732 else
6733 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6734 }
6735
/* Decode VFP/Neon extension-register load/store instructions
   (vstm/vpush, vldm/vpop, vstr, vldr and the mrrc/mcrr transfers),
   dispatching on bits 20-24.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6766
6767 /* Decode shifted register instructions. */
6768
6769 static int
6770 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6771 uint16_t insn2, struct regcache *regs,
6772 arm_displaced_step_closure *dsc)
6773 {
6774 /* PC is only allowed to be used in instruction MOV. */
6775
6776 unsigned int op = bits (insn1, 5, 8);
6777 unsigned int rn = bits (insn1, 0, 3);
6778
6779 if (op == 0x2 && rn == 0xf) /* MOV */
6780 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6781 else
6782 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6783 "dp (shift reg)", dsc);
6784 }
6785
6786
6787 /* Decode extension register load/store. Exactly the same as
6788 arm_decode_ext_reg_ld_st. */
6789
/* Decode Thumb-2 extension-register load/store instructions,
   dispatching on bits 4-8 of the first halfword.  Only vldr can load
   the PC-relative literal form that needs modification; everything
   else runs unmodified out of line.  */

static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     arm_displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c:  /* 01x00 */
    case 0x0a: case 0x0e:  /* 01x10 */
    case 0x12: case 0x16:  /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d:  /* 01x01 */
    case 0x0b: case 0x0f:  /* 01x11 */
    case 0x13: case 0x17:  /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6825
/* Decode the ARM supervisor-call and coprocessor instruction space,
   dispatching on op1 (bits 20-25), op (bit 4) and the coprocessor
   number (bits 8-11).  Coprocessor 101x is VFP/Neon; op1 0b11xxxx is
   SVC.  */

static int
arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
		      struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 25);
  int op = bit (insn, 4);
  unsigned int coproc = bits (insn, 8, 11);

  if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
    return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* stc/stc2.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* ldc/ldc2 imm/lit.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x3e) == 0x00)
    return arm_copy_undef (gdbarch, insn, dsc);
  else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
    return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
  else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
  else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
  else if ((op1 & 0x30) == 0x20 && !op)
    {
      if ((coproc & 0xe) == 0xa)
	return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
    }
  else if ((op1 & 0x30) == 0x20 && op)
    return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
  else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
  else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
  else if ((op1 & 0x30) == 0x30)
    return arm_copy_svc (gdbarch, insn, regs, dsc);
  else
    return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable.  */
}
6870
/* Decode the Thumb-2 supervisor-call and coprocessor instruction space,
   dispatching on bits 5-8, 9 and 4 of the first halfword and the
   coprocessor number (bits 8-11 of the second halfword).  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 arm_displaced_step_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0)  /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	  /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0)  /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else /* LDC/LDC2 {literal, immediate}.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);

  return 0;
}
6911
6912 static void
6913 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6914 arm_displaced_step_closure *dsc, int rd)
6915 {
6916 /* ADR Rd, #imm
6917
6918 Rewrite as:
6919
6920 Preparation: Rd <- PC
6921 Insn: ADD Rd, #imm
6922 Cleanup: Null.
6923 */
6924
6925 /* Rd <- PC */
6926 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6927 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6928 }
6929
6930 static int
6931 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6932 arm_displaced_step_closure *dsc,
6933 int rd, unsigned int imm)
6934 {
6935
6936 /* Encoding T2: ADDS Rd, #imm */
6937 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6938
6939 install_pc_relative (gdbarch, regs, dsc, rd);
6940
6941 return 0;
6942 }
6943
6944 static int
6945 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6946 struct regcache *regs,
6947 arm_displaced_step_closure *dsc)
6948 {
6949 unsigned int rd = bits (insn, 8, 10);
6950 unsigned int imm8 = bits (insn, 0, 7);
6951
6952 if (debug_displaced)
6953 fprintf_unfiltered (gdb_stdlog,
6954 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6955 rd, imm8, insn);
6956
6957 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6958 }
6959
/* Copy a 32-bit Thumb ADR (ADR.W) instruction for displaced stepping.
   The ADD and SUB variants are rewritten as "ADD/SUB Rd, Rd, #imm"
   (encoding T3/T4 of ADD/SUB immediate), with install_pc_relative
   pre-loading Rd with the PC value.  */

static int
thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int rd = bits (insn2, 8, 11);
  /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
     extract raw immediate encoding rather than computing immediate.  When
     generating ADD or SUB instruction, we can simply perform OR operation to
     set immediate into ADD.  */
  unsigned int imm_3_8 = insn2 & 0x70ff;
  unsigned int imm_i = insn1 & 0x0400;  /* Clear all bits except bit 10.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
			rd, imm_i, imm_3_8, insn1, insn2);

  if (bit (insn1, 7))  /* Encoding T2 (the SUB form of ADR).  */
    {
      /* Encoding T2: SUB Rd, Rd, #imm.  (The original comment mislabeled
	 this branch "Encoding T3".)  */
      dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  else /* Encoding T3 */
    {
      /* Encoding T3: ADD Rd, Rd, #imm.  */
      dsc->modinsn[0] = (0xf100 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  dsc->numinsns = 2;

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
6996
/* Copy a 16-bit Thumb PC-relative load (LDR Rt, [PC, #imm8]) for
   displaced stepping.  */

static int
thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
			      struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int rt = bits (insn1, 8, 10);
  unsigned int pc;
  int imm8 = (bits (insn1, 0, 7) << 2);  /* Word offset, scaled by 4.  */

  /* LDR Rd, #imm8

     Rewrite as:

     Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;

     Insn: LDR R0, [R2, R3];
     Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb ldr r%d [pc #%d]\n"
			, rt, imm8);

  /* Save the registers the rewritten instruction clobbers.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
  /* The assembler calculates the required value of the offset from the
     Align(PC,4) value of this instruction to the label.  */
  pc = pc & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);

  dsc->rd = rt;
  dsc->u.ldst.xfersize = 4;
  dsc->u.ldst.rn = 0;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  dsc->modinsn[0] = 0x58d0;  /* ldr r0, [r2, r3] */

  dsc->cleanup = &cleanup_load;

  return 0;
}
7044
/* Copy Thumb cbnz/cbz instruction.  The branch condition is resolved
   here, at copy time, from the current value of Rn; the scratch slot
   gets a NOP and cleanup_branch performs the actual PC update.  */

static int
thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
		     struct regcache *regs,
		     arm_displaced_step_closure *dsc)
{
  int non_zero = bit (insn1, 11);  /* 1 = CBNZ, 0 = CBZ.  */
  unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
  CORE_ADDR from = dsc->insn_addr;
  int rn = bits (insn1, 0, 2);
  int rn_val = displaced_read_reg (regs, dsc, rn);

  dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
  /* CBNZ and CBZ do not affect the condition flags.  If condition is true,
     set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
     condition is false, let it be, cleanup_branch will do nothing.  */
  if (dsc->u.branch.cond)
    {
      dsc->u.branch.cond = INST_AL;
      dsc->u.branch.dest = from + 4 + imm5;
    }
  else
    dsc->u.branch.dest = from + 2;  /* Fall through to the next insn.  */

  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
			" insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
			rn, rn_val, insn1, dsc->u.branch.dest);

  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;
  return 0;
}
7083
/* Copy Table Branch Byte/Halfword (TBB/TBH).  The branch target is
   resolved here at copy time by reading the table entry from inferior
   memory; cleanup_branch performs the actual PC update.  */
static int
thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  ULONGEST rn_val, rm_val;
  int is_tbh = bit (insn2, 4);  /* 1 = TBH (halfword table), 0 = TBB.  */
  CORE_ADDR halfwords = 0;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
  rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));

  /* NOTE(review): the target_read_memory return values below are not
     checked; on a read failure HALFWORDS keeps its extracted garbage —
     consider whether an error path is warranted.  */
  if (is_tbh)
    {
      gdb_byte buf[2];

      target_read_memory (rn_val + 2 * rm_val, buf, 2);
      halfwords = extract_unsigned_integer (buf, 2, byte_order);
    }
  else
    {
      gdb_byte buf[1];

      target_read_memory (rn_val + rm_val, buf, 1);
      halfwords = extract_unsigned_integer (buf, 1, byte_order);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
			" offset 0x%x\n", is_tbh ? "tbh" : "tbb",
			(unsigned int) rn_val, (unsigned int) rm_val,
			(unsigned int) halfwords);

  dsc->u.branch.cond = INST_AL;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  /* Table entries hold halfword offsets from the aligned PC.  */
  dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
7128
7129 static void
7130 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7131 arm_displaced_step_closure *dsc)
7132 {
7133 /* PC <- r7 */
7134 int val = displaced_read_reg (regs, dsc, 7);
7135 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7136
7137 /* r7 <- r8 */
7138 val = displaced_read_reg (regs, dsc, 8);
7139 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7140
7141 /* r8 <- tmp[0] */
7142 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7143
7144 }
7145
/* Copy a 16-bit Thumb POP whose register list includes PC.  PC cannot
   be popped directly while executing out of line, so the instruction
   is rewritten per the strategy comment below and a cleanup routine
   installs the popped value into PC afterwards.  Returns 0.  */

static int
thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
			 struct regcache *regs,
			 arm_displaced_step_closure *dsc)
{
  /* Low 8 bits of the encoding are the r0-r7 register list.  */
  dsc->u.block.regmask = insn1 & 0x00ff;

  /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
     to :

     (1) register list is full, that is, r0-r7 are used.
     Prepare: tmp[0] <- r8

     POP {r0, r1, ...., r6, r7}; remove PC from reglist
     MOV r8, r7; Move value of r7 to r8;
     POP {r7}; Store PC value into r7.

     Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]

     (2) register list is not full, supposing there are N registers in
     register list (except PC, 0 <= N <= 7).
     Prepare: for each i, 0 - N, tmp[i] <- ri.

     POP {r0, r1, ...., rN};

     Cleanup: Set registers in original reglist from r0 - rN.  Restore r0 - rN
     from tmp[] properly.
  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
			dsc->u.block.regmask, insn1);

  if (dsc->u.block.regmask == 0xff)
    {
      /* Case (1): save r8, which the rewritten sequence clobbers.  */
      dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);

      dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
      dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
      dsc->modinsn[2] = 0xbc80; /* POP {r7} */

      dsc->numinsns = 3;
      dsc->cleanup = &cleanup_pop_pc_16bit_all;
    }
  else
    {
      /* Case (2): pop into r0..rN instead (NEW_REGMASK has
	 num_in_list + 1 low bits set), the last slot receiving the PC
	 value.  Save r0..rN first so cleanup can redistribute.  */
      unsigned int num_in_list = bitcount (dsc->u.block.regmask);
      unsigned int i;
      unsigned int new_regmask;

      for (i = 0; i < num_in_list + 1; i++)
	dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

      new_regmask = (1 << (num_in_list + 1)) - 1;

      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
					  "{..., pc}: original reg list %.4x,"
					  " modified list %.4x\n"),
			    (int) dsc->u.block.regmask, new_regmask);

      /* Record PC as part of the original list for the cleanup.  */
      dsc->u.block.regmask |= 0x8000;
      dsc->u.block.writeback = 0;
      dsc->u.block.cond = INST_AL;

      dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);

      dsc->cleanup = &cleanup_block_load_pc;
    }

  return 0;
}
7218
/* Decode the 16-bit Thumb instruction INSN1 and dispatch to the
   appropriate displaced-stepping copy routine.  Instructions that are
   safe to execute unchanged out of line go through
   thumb_copy_unmodified_16bit; PC-dependent ones get dedicated
   handlers.  An undecodable instruction is an internal error.  */

static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    struct regcache *regs,
				    arm_displaced_step_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions.  */
  switch (op_bit_12_15)
    {
      /* Shift (immediate), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					 "shift/add/sub/mov/cmp",
					 dsc);
      break;
    case 4:
      switch (op_bit_10_11)
	{
	case 0: /* Data-processing */
	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					     "data-processing",
					     dsc);
	  break;
	case 1: /* Special data instructions and branch and exchange.  */
	  {
	    unsigned short op = bits (insn1, 7, 9);
	    if (op == 6 || op == 7) /* BX or BLX */
	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
						 dsc);
	  }
	  break;
	default: /* LDR (literal) */
	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
	}
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address */
	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
	switch (bits (insn1, 8, 11))
	  {
	  case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
	    break;
	  case 12: case 13: /* POP */
	    if (bit (insn1, 8)) /* PC is in register list.  */
	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
	    break;
	  case 15: /* If-Then, and hints */
	    if (bits (insn1, 0, 3))
	      /* If-Then makes up to four following instructions conditional.
		 IT instruction itself is not conditional, so handle it as a
		 common unmodified instruction.  */
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
						 dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
	    break;
	  default:
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
	  }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
	err = thumb_copy_b (gdbarch, insn1, dsc);
      else
	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
7321
/* Decode a 32-bit Thumb-2 load / memory-hint instruction (LDRB/LDRSB,
   LDRH, LDR, PLD/PLI and friends) and dispatch to the matching
   displaced-stepping copy routine.  RT == 0xf distinguishes the hint
   encodings, RN == 0xf the literal (PC-relative) forms.  Returns the
   selected copy routine's result.  */

static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
				 uint16_t insn1, uint16_t insn2,
				 struct regcache *regs,
				 arm_displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);
  int rn = bits (insn1, 0, 3);
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
	{
	  if (rn == 0xf)
	    /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"pli/pld", dsc);
	}
      else
	{
	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     1);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrb{reg, immediate}/ldrbt",
						dsc);
	}

      break;
    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "pld/unalloc memhint", dsc);
      else
	{
	  if (rn == 0xf)
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     2);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrh/ldrht", dsc);
	}
      break;
    case 2: /* Load word */
      {
	int insn2_bit_8_11 = bits (insn2, 8, 11);

	if (rn == 0xf)
	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
	else if (op1 == 0x1) /* Encoding T3 */
	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
					   0, 1);
	else /* op1 == 0x0 */
	  {
	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
	      /* LDR (immediate) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, bit (insn2, 8), 1);
	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						  "ldrt", dsc);
	    else
	      /* LDR (register) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, 0, 0);
	  }
	break;
      }
    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      break;
    }
  /* Not reached: every case above returns.  */
  return 0;
}
7401
/* Decode the 32-bit Thumb-2 instruction INSN1:INSN2 and dispatch to
   the appropriate displaced-stepping copy routine, following the
   top-level op1 (bits <12:11> of the first halfword) split of the
   Thumb-2 encoding tables.  An undecodable instruction is an internal
   error.  */

static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    uint16_t insn2, struct regcache *regs,
				    arm_displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      {
	switch (bits (insn1, 9, 10))
	  {
	  case 0:
	    if (bit (insn1, 6))
	      {
		/* Load/store {dual, exclusive}, table branch.  */
		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
		    && bits (insn2, 5, 7) == 0)
		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
						  dsc);
		else
		  /* PC is not allowed to use in load/store {dual, exclusive}
		     instructions.  */
		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						     "load/store dual/ex", dsc);
	      }
	    else /* load/store multiple */
	      {
		switch (bits (insn1, 7, 8))
		  {
		  case 0: case 3: /* SRS, RFE */
		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						       "srs/rfe", dsc);
		    break;
		  case 1: case 2: /* LDM/STM/PUSH/POP */
		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
		    break;
		  }
	      }
	    break;

	  case 1:
	    /* Data-processing (shift register).  */
	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
					      dsc);
	    break;
	  default: /* Coprocessor instructions.  */
	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	    break;
	  }
	break;
      }
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
	{
	  if (bit (insn2, 14)  /* BLX/BL */
	      || bit (insn2, 12) /* Unconditional branch */
	      || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
	  else
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "misc ctrl", dsc);
	}
      else
	{
	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
	    {
	      int op = bits (insn1, 4, 8);
	      int rn = bits (insn1, 0, 3);
	      /* ADR and ADDW/SUBW with Rn == PC read the PC and need
		 special handling.  */
	      if ((op == 0 || op == 0xa) && rn == 0xf)
		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
						    regs, dsc);
	      else
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "dp/pb", dsc);
	    }
	  else /* Data processing (modified immediate) */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "dp/mi", dsc);
	}
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
	{
	case 0:
	  if (bit (insn1, 4))
	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
						   regs, dsc);
	  else /* NEON Load/Store and Store single data item */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "neon elt/struct load/store",
					       dsc);
	  break;
	case 1: /* op1 = 3, bits (9, 10) == 1 */
	  switch (bits (insn1, 7, 8))
	    {
	    case 0: case 1: /* Data processing (register) */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "dp(reg)", dsc);
	      break;
	    case 2: /* Multiply and absolute difference */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "mul/mua/diff", dsc);
	      break;
	    case 3: /* Long multiply and divide */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "lmul/lmua", dsc);
	      break;
	    }
	  break;
	default: /* Coprocessor instructions */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	  break;
	}
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));

}
7529
7530 static void
7531 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7532 struct regcache *regs,
7533 arm_displaced_step_closure *dsc)
7534 {
7535 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7536 uint16_t insn1
7537 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7538
7539 if (debug_displaced)
7540 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7541 "at %.8lx\n", insn1, (unsigned long) from);
7542
7543 dsc->is_thumb = 1;
7544 dsc->insn_size = thumb_insn_size (insn1);
7545 if (thumb_insn_size (insn1) == 4)
7546 {
7547 uint16_t insn2
7548 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7549 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7550 }
7551 else
7552 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7553 }
7554
/* Analyse the instruction at FROM, filling in DSC with everything
   needed to copy it to the scratch space at TO and fix up afterwards.
   Thumb-mode instructions are delegated to thumb_process_displaced_insn;
   ARM instructions are dispatched on bits <27:25> and bit <4> of the
   encoding.  An undecodable instruction is an internal error.  */

void
arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, struct regcache *regs,
			    arm_displaced_step_closure *dsc)
{
  int err = 0;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  uint32_t insn;

  /* Most displaced instructions use a 1-instruction scratch space, so set this
     here and override below if/when necessary.  */
  dsc->numinsns = 1;
  dsc->insn_addr = from;
  dsc->scratch_base = to;
  dsc->cleanup = NULL;
  dsc->wrote_to_pc = 0;

  if (!displaced_in_arm_mode (regs))
    return thumb_process_displaced_insn (gdbarch, from, regs, dsc);

  dsc->is_thumb = 0;
  dsc->insn_size = 4;
  insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
			"at %.8lx\n", (unsigned long) insn,
			(unsigned long) from);

  /* Condition field 0xf selects the unconditional instruction space.  */
  if ((insn & 0xf0000000) == 0xf0000000)
    err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
  /* Dispatch key: bits <27:25> of the encoding in bits 3..1, bit <4>
     in bit 0.  */
  else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
    {
    case 0x0: case 0x1: case 0x2: case 0x3:
      err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
      break;

    case 0x4: case 0x5: case 0x6:
      err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
      break;

    case 0x7:
      err = arm_decode_media (gdbarch, insn, dsc);
      break;

    case 0x8: case 0x9: case 0xa: case 0xb:
      err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
      break;

    case 0xc: case 0xd: case 0xe: case 0xf:
      err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
      break;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("arm_process_displaced_insn: Instruction decode error"));
}
7612
/* Actually set up the scratch space for a displaced instruction: write
   the modified instruction(s) recorded in DSC to the scratch area at
   TO, followed by the architecture's breakpoint instruction so GDB
   regains control after the step.  */

void
arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, arm_displaced_step_closure *dsc)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  unsigned int i, len, offset;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  /* Thumb modified instructions are emitted as 2-byte halfwords, ARM
     ones as 4-byte words.  */
  int size = dsc->is_thumb? 2 : 4;
  const gdb_byte *bkp_insn;

  offset = 0;
  /* Poke modified instruction(s).  */
  for (i = 0; i < dsc->numinsns; i++)
    {
      if (debug_displaced)
	{
	  fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
	  if (size == 4)
	    fprintf_unfiltered (gdb_stdlog, "%.8lx",
				dsc->modinsn[i]);
	  else if (size == 2)
	    fprintf_unfiltered (gdb_stdlog, "%.4x",
				(unsigned short)dsc->modinsn[i]);

	  fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
			      (unsigned long) to + offset);

	}
      write_memory_unsigned_integer (to + offset, size,
				     byte_order_for_code,
				     dsc->modinsn[i]);
      offset += size;
    }

  /* Choose the correct breakpoint instruction.  */
  if (dsc->is_thumb)
    {
      bkp_insn = tdep->thumb_breakpoint;
      len = tdep->thumb_breakpoint_size;
    }
  else
    {
      bkp_insn = tdep->arm_breakpoint;
      len = tdep->arm_breakpoint_size;
    }

  /* Put breakpoint afterwards.  */
  write_memory (to + offset, bkp_insn, len);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
			paddress (gdbarch, from), paddress (gdbarch, to));
}
7668
/* Entry point for cleaning things up after a displaced instruction has been
   single-stepped.  */

void
arm_displaced_step_fixup (struct gdbarch *gdbarch,
			  struct displaced_step_closure *dsc_,
			  CORE_ADDR from, CORE_ADDR to,
			  struct regcache *regs)
{
  arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;

  /* Run the per-instruction cleanup recorded when the instruction was
     copied, if any.  */
  if (dsc->cleanup)
    dsc->cleanup (gdbarch, regs, dsc);

  /* If neither the instruction nor its cleanup wrote the PC, resume at
     the instruction following the original one.  */
  if (!dsc->wrote_to_pc)
    regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
				    dsc->insn_addr + dsc->insn_size);

}
7688
7689 #include "bfd-in2.h"
7690 #include "libcoff.h"
7691
/* GDB's wrapper around the opcodes-library disassembler: print the
   instruction at MEMADDR.  When MEMADDR is a Thumb address, a fake
   COFF Thumb symbol is supplied so the opcodes disassembler switches
   to Thumb decoding.  Returns the length consumed, as
   default_print_insn does.  */

static int
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
{
  gdb_disassembler *di
    = static_cast<gdb_disassembler *>(info->application_data);
  struct gdbarch *gdbarch = di->arch ();

  if (arm_pc_is_thumb (gdbarch, memaddr))
    {
      /* Static: built once on first use and reused for every later
	 Thumb disassembly.  */
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;

      if (csym.native == NULL)
	{
	  /* Create a fake symbol vector containing a Thumb symbol.
	     This is solely so that the code in print_insn_little_arm()
	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
	     the presence of a Thumb symbol and switch to decoding
	     Thumb instructions.  */

	  fake_target.flavour = bfd_target_coff_flavour;
	  fake_bfd.xvec = &fake_target;
	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
	  csym.native = &ce;
	  csym.symbol.the_bfd = &fake_bfd;
	  csym.symbol.name = "fake";
	  asym = (asymbol *) & csym;
	}

      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    }
  else
    info->symbols = NULL;

  /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
     accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit.  Otherwise,
     opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
     the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
     in default_print_insn.  */
  if (exec_bfd != NULL)
    info->flags |= USER_SPECIFIED_MACHINE_TYPE;

  return default_print_insn (memaddr, info);
}
7740
7741 /* The following define instruction sequences that will cause ARM
7742 cpu's to take an undefined instruction trap. These are used to
7743 signal a breakpoint to GDB.
7744
7745 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7746 modes. A different instruction is required for each mode. The ARM
7747 cpu's can also be big or little endian. Thus four different
7748 instructions are needed to support all cases.
7749
7750 Note: ARMv4 defines several new instructions that will take the
7751 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7752 not in fact add the new instructions. The new undefined
7753 instructions in ARMv4 are all instructions that had no defined
7754 behaviour in earlier chips. There is no guarantee that they will
   raise an exception, but may be treated as NOP's.  In practice, it
   may only be safe to rely on instructions matching:
7757
7758 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7759 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7760 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7761
   Even this may only be true if the condition predicate is true.  The
7763 following use a condition predicate of ALWAYS so it is always TRUE.
7764
7765 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7766 and NetBSD all use a software interrupt rather than an undefined
   instruction to force a trap.  This can be handled by the
7768 abi-specific code during establishment of the gdbarch vector. */
7769
7770 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7771 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7772 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7773 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7774
7775 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7776 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7777 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7778 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7779
7780 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7781
7782 static int
7783 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7784 {
7785 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7786 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7787
7788 if (arm_pc_is_thumb (gdbarch, *pcptr))
7789 {
7790 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7791
7792 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7793 check whether we are replacing a 32-bit instruction. */
7794 if (tdep->thumb2_breakpoint != NULL)
7795 {
7796 gdb_byte buf[2];
7797
7798 if (target_read_memory (*pcptr, buf, 2) == 0)
7799 {
7800 unsigned short inst1;
7801
7802 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7803 if (thumb_insn_size (inst1) == 4)
7804 return ARM_BP_KIND_THUMB2;
7805 }
7806 }
7807
7808 return ARM_BP_KIND_THUMB;
7809 }
7810 else
7811 return ARM_BP_KIND_ARM;
7812
7813 }
7814
7815 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7816
7817 static const gdb_byte *
7818 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7819 {
7820 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7821
7822 switch (kind)
7823 {
7824 case ARM_BP_KIND_ARM:
7825 *size = tdep->arm_breakpoint_size;
7826 return tdep->arm_breakpoint;
7827 case ARM_BP_KIND_THUMB:
7828 *size = tdep->thumb_breakpoint_size;
7829 return tdep->thumb_breakpoint;
7830 case ARM_BP_KIND_THUMB2:
7831 *size = tdep->thumb2_breakpoint_size;
7832 return tdep->thumb2_breakpoint;
7833 default:
7834 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7835 }
7836 }
7837
/* Implement the breakpoint_kind_from_current_state gdbarch method.
   Prefer deciding ARM vs Thumb by software-single-step prediction from
   the current PC; fall back to arm_breakpoint_kind_from_pc when the
   prediction does not cover *PCPTR.  */

static int
arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
					struct regcache *regcache,
					CORE_ADDR *pcptr)
{
  gdb_byte buf[4];

  /* Check the memory pointed by PC is readable.  */
  if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
    {
      struct arm_get_next_pcs next_pcs_ctx;

      arm_get_next_pcs_ctor (&next_pcs_ctx,
			     &arm_get_next_pcs_ops,
			     gdbarch_byte_order (gdbarch),
			     gdbarch_byte_order_for_code (gdbarch),
			     0,
			     regcache);

      std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);

      /* If MEMADDR is the next instruction of current pc, do the
	 software single step computation, and get the thumb mode by
	 the destination address.  */
      for (CORE_ADDR pc : next_pcs)
	{
	  if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
	    {
	      if (IS_THUMB_ADDR (pc))
		{
		  /* Destination is Thumb: tag the address and let the
		     PC-based method pick Thumb vs Thumb-2.  */
		  *pcptr = MAKE_THUMB_ADDR (*pcptr);
		  return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
		}
	      else
		return ARM_BP_KIND_ARM;
	    }
	}
    }

  /* Fall back to deciding from the address alone.  */
  return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
}
7881
/* Extract from an array REGBUF containing the (raw) register state a
   function return value of type TYPE, and copy that, in virtual
   format, into VALBUF.  Floats are taken from F0 or r0/r1 depending on
   the FP model; integers and aggregates come from r0 onwards.  */

static void
arm_extract_return_value (struct type *type, struct regcache *regs,
			  gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE_FLT == TYPE_CODE (type))
    {
      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  {
	    /* The value is in register F0 in internal format.  We need to
	       extract the raw value and then convert it to the desired
	       internal type.  */
	    bfd_byte tmpbuf[FP_REGISTER_SIZE];

	    regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
	    target_float_convert (tmpbuf, arm_ext_type (gdbarch),
				  valbuf, type);
	  }
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Value is in r0, with r1 holding the second word of a
	     larger-than-4-byte float.  */
	  regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
				  valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_extract_return_value: "
			    "Floating point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      /* If the type is a plain integer, then the access is
	 straight-forward.  Otherwise we have to play around a bit
	 more.  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      ULONGEST tmp;

      while (len > 0)
	{
	  /* By using store_unsigned_integer we avoid having to do
	     anything special for small big-endian values.  */
	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
	  store_unsigned_integer (valbuf,
				  (len > INT_REGISTER_SIZE
				   ? INT_REGISTER_SIZE : len),
				  byte_order, tmp);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  regcache_cooked_read (regs, regno++, tmpbuf);
	  memcpy (valbuf, tmpbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
7974
7975
/* Will a function return an aggregate type in memory or in a
   register?  Return 0 if an aggregate type can be returned in a
   register, 1 if it must be returned in memory.  The answer depends on
   the ABI: AAPCS uses a simple size test, while the old APCS also
   requires the aggregate to be "integer like".  */

static int
arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
{
  enum type_code code;

  type = check_typedef (type);

  /* Simple, non-aggregate types (ie not including vectors and
     complex) are always returned in a register (or registers).  */
  code = TYPE_CODE (type);
  if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
      && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
    return 0;

  if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
    {
      /* Vector values should be returned using ARM registers if they
	 are not over 16 bytes.  */
      return (TYPE_LENGTH (type) > 16);
    }

  if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
    {
      /* The AAPCS says all aggregates not larger than a word are returned
	 in a register.  */
      if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
	return 0;

      return 1;
    }
  else
    {
      int nRc;

      /* All aggregate types that won't fit in a register must be returned
	 in memory.  */
      if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
	return 1;

      /* In the ARM ABI, "integer" like aggregate types are returned in
	 registers.  For an aggregate type to be integer like, its size
	 must be less than or equal to INT_REGISTER_SIZE and the
	 offset of each addressable subfield must be zero.  Note that bit
	 fields are not addressable, and all addressable subfields of
	 unions always start at offset zero.

	 This function is based on the behaviour of GCC 2.95.1.
	 See: gcc/arm.c: arm_return_in_memory() for details.

	 Note: All versions of GCC before GCC 2.95.2 do not set up the
	 parameters correctly for a function returning the following
	 structure: struct { float f;}; This should be returned in memory,
	 not a register.  Richard Earnshaw sent me a patch, but I do not
	 know of any way to detect if a function like the above has been
	 compiled with the correct calling convention.  */

      /* Assume all other aggregate types can be returned in a register.
	 Run a check for structures, unions and arrays.  */
      nRc = 0;

      if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
	{
	  int i;
	  /* Need to check if this struct/union is "integer" like.  For
	     this to be true, its size must be less than or equal to
	     INT_REGISTER_SIZE and the offset of each addressable
	     subfield must be zero.  Note that bit fields are not
	     addressable, and unions always start at offset zero.  If any
	     of the subfields is a floating point type, the struct/union
	     cannot be an integer type.  */

	  /* For each field in the object, check:
	     1) Is it FP? --> yes, nRc = 1;
	     2) Is it addressable (bitpos != 0) and
	     not packed (bitsize == 0)?
	     --> yes, nRc = 1
	  */

	  for (i = 0; i < TYPE_NFIELDS (type); i++)
	    {
	      enum type_code field_type_code;

	      field_type_code
		= TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
							     i)));

	      /* Is it a floating point type field?  */
	      if (field_type_code == TYPE_CODE_FLT)
		{
		  nRc = 1;
		  break;
		}

	      /* If bitpos != 0, then we have to care about it.  */
	      if (TYPE_FIELD_BITPOS (type, i) != 0)
		{
		  /* Bitfields are not addressable.  If the field bitsize is
		     zero, then the field is not packed.  Hence it cannot be
		     a bitfield or any other packed type.  */
		  if (TYPE_FIELD_BITSIZE (type, i) == 0)
		    {
		      nRc = 1;
		      break;
		    }
		}
	    }
	}

      return nRc;
    }
}
8091
/* Write into appropriate registers a function return value of type
   TYPE, given in virtual format.  */

static void
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE (type) == TYPE_CODE_FLT)
    {
      gdb_byte buf[FP_REGISTER_SIZE];

      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  /* FPA hardware: convert to the FPA extended format and
	     place the value in F0.  */
	  target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
	  regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* These models return FP values in the core registers:
	     r0, and r1 as well when the value is wider than one
	     word (e.g. a double).  */
	  regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
				   valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
	{
	  /* Values of one word or less are zero/sign-extended and
	     returned in r0.  */
	  bfd_byte tmpbuf[INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);

	  store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
	  regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
	}
      else
	{
	  /* Integral values greater than one word are stored in
	     consecutive registers starting with r0.  This will always
	     be a multiple of the register size.  */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;

	  while (len > 0)
	    {
	      regcache_cooked_write (regs, regno++, valbuf);
	      len -= INT_REGISTER_SIZE;
	      valbuf += INT_REGISTER_SIZE;
	    }
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  The final (possibly
	 partial) word is copied through TMPBUF so we never read past
	 the end of VALBUF.  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  memcpy (tmpbuf, valbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  regcache_cooked_write (regs, regno++, tmpbuf);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
8184
8185
/* Handle function return values.

   Implements the gdbarch return_value method: decide whether a value
   of type VALTYPE is returned in registers or in memory, and, when
   READBUF/WRITEBUF are non-NULL, transfer the value out of / into the
   return registers accordingly.  */

static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
		  struct type *valtype, struct regcache *regcache,
		  gdb_byte *readbuf, const gdb_byte *writebuf)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  int vfp_base_count;

  /* VFP ABI: values that qualify per arm_vfp_call_candidate are
     returned in VFP registers, VFP_BASE_COUNT units of the base
     type's register class ('s', 'd' or 'q').  */
  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
    {
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      int i;
      for (i = 0; i < vfp_base_count; i++)
	{
	  if (reg_char == 'q')
	    {
	      /* Quad registers are transferred via their two
		 component double registers.  */
	      if (writebuf)
		arm_neon_quad_write (gdbarch, regcache, i,
				     writebuf + i * unit_length);

	      if (readbuf)
		arm_neon_quad_read (gdbarch, regcache, i,
				    readbuf + i * unit_length);
	    }
	  else
	    {
	      char name_buf[4];
	      int regnum;

	      /* Translate a register name such as "s0" or "d1" into
		 a register number.  */
	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						    strlen (name_buf));
	      if (writebuf)
		regcache_cooked_write (regcache, regnum,
				       writebuf + i * unit_length);
	      if (readbuf)
		regcache_cooked_read (regcache, regnum,
				      readbuf + i * unit_length);
	    }
	}
      return RETURN_VALUE_REGISTER_CONVENTION;
    }

  /* Aggregates (and complex values) may be returned in memory,
     depending on the struct-return convention and on the type.  */
  if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
      || TYPE_CODE (valtype) == TYPE_CODE_UNION
      || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
    {
      if (tdep->struct_return == pcc_struct_return
	  || arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }
  else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
    {
      if (arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }

  /* Otherwise the value is passed in core registers.  */
  if (writebuf)
    arm_store_return_value (valtype, regcache, writebuf);

  if (readbuf)
    arm_extract_return_value (valtype, regcache, readbuf);

  return RETURN_VALUE_REGISTER_CONVENTION;
}
8257
8258
8259 static int
8260 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8261 {
8262 struct gdbarch *gdbarch = get_frame_arch (frame);
8263 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8264 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8265 CORE_ADDR jb_addr;
8266 gdb_byte buf[INT_REGISTER_SIZE];
8267
8268 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8269
8270 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8271 INT_REGISTER_SIZE))
8272 return 0;
8273
8274 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8275 return 1;
8276 }
8277
/* Recognize GCC and GNU ld's trampolines.  If we are in a trampoline,
   return the target PC.  Otherwise return 0.  */

CORE_ADDR
arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
{
  const char *name;
  int namelen;
  CORE_ADDR start_addr;

  /* Find the starting address and name of the function containing the PC.  */
  if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
    {
      /* Trampoline 'bx reg' doesn't belong to any functions.  Do the
	 check here.  */
      start_addr = arm_skip_bx_reg (frame, pc);
      if (start_addr != 0)
	return start_addr;

      return 0;
    }

  /* If PC is in a Thumb call or return stub, return the address of the
     target PC, which is in a register.  The thunk functions are called
     _call_via_xx, where x is the register name.  The possible names
     are r0-r9, sl, fp, ip, sp, and lr.  ARM RealView has similar
     functions, named __ARM_call_via_r[0-7].  */
  if (startswith (name, "_call_via_")
      || startswith (name, "__ARM_call_via_"))
    {
      /* Use the name suffix to determine which register contains the
	 target PC.  */
      static const char *table[15] =
      {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
       "r8", "r9", "sl", "fp", "ip", "sp", "lr"
      };
      int regno;
      /* All register-name suffixes above are two characters long, so
	 they start two characters from the end of the stub name.  */
      int offset = strlen (name) - 2;

      for (regno = 0; regno <= 14; regno++)
	if (strcmp (&name[offset], table[regno]) == 0)
	  return get_frame_register_unsigned (frame, regno);
    }

  /* GNU ld generates __foo_from_arm or __foo_from_thumb for
     non-interworking calls to foo.  We could decode the stubs
     to find the target but it's easier to use the symbol table.  */
  namelen = strlen (name);
  if (name[0] == '_' && name[1] == '_'
      && ((namelen > 2 + strlen ("_from_thumb")
	   && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
	  || (namelen > 2 + strlen ("_from_arm")
	      && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
    {
      char *target_name;
      int target_len = namelen - 2;
      struct bound_minimal_symbol minsym;
      struct objfile *objfile;
      struct obj_section *sec;

      /* Distinguish the two suffixes by their final character
	 ('b' ends "_from_thumb").  */
      if (name[namelen - 1] == 'b')
	target_len -= strlen ("_from_thumb");
      else
	target_len -= strlen ("_from_arm");

      /* Extract "foo" from "__foo_from_...".  */
      target_name = (char *) alloca (target_len + 1);
      memcpy (target_name, name + 2, target_len);
      target_name[target_len] = '\0';

      /* Prefer a symbol from the objfile the stub itself lives in.  */
      sec = find_pc_section (pc);
      objfile = (sec == NULL) ? NULL : sec->objfile;
      minsym = lookup_minimal_symbol (target_name, NULL, objfile);
      if (minsym.minsym != NULL)
	return BMSYMBOL_VALUE_ADDRESS (minsym);
      else
	return 0;
    }

  return 0;			/* not a stub */
}
8358
8359 static void
8360 set_arm_command (const char *args, int from_tty)
8361 {
8362 printf_unfiltered (_("\
8363 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8364 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8365 }
8366
/* Top-level "show arm" command handler: display the values of all
   "show arm" subcommands.  */

static void
show_arm_command (const char *args, int from_tty)
{
  cmd_show_list (showarmcmdlist, from_tty, "");
}
8372
8373 static void
8374 arm_update_current_architecture (void)
8375 {
8376 struct gdbarch_info info;
8377
8378 /* If the current architecture is not ARM, we have nothing to do. */
8379 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8380 return;
8381
8382 /* Update the architecture. */
8383 gdbarch_info_init (&info);
8384
8385 if (!gdbarch_update_p (info))
8386 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8387 }
8388
8389 static void
8390 set_fp_model_sfunc (const char *args, int from_tty,
8391 struct cmd_list_element *c)
8392 {
8393 int fp_model;
8394
8395 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8396 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8397 {
8398 arm_fp_model = (enum arm_float_model) fp_model;
8399 break;
8400 }
8401
8402 if (fp_model == ARM_FLOAT_LAST)
8403 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8404 current_fp_model);
8405
8406 arm_update_current_architecture ();
8407 }
8408
/* "show arm fpu" handler: print the selected floating point model;
   when the setting is "auto" on an ARM target, also show the model
   actually in effect for the current architecture.  */

static void
show_fp_model (struct ui_file *file, int from_tty,
	       struct cmd_list_element *c, const char *value)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());

  if (arm_fp_model == ARM_FLOAT_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
		      fp_model_strings[tdep->fp_model]);
  else
    fprintf_filtered (file, _("\
The current ARM floating point model is \"%s\".\n"),
		      fp_model_strings[arm_fp_model]);
}
8425
8426 static void
8427 arm_set_abi (const char *args, int from_tty,
8428 struct cmd_list_element *c)
8429 {
8430 int arm_abi;
8431
8432 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8433 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8434 {
8435 arm_abi_global = (enum arm_abi_kind) arm_abi;
8436 break;
8437 }
8438
8439 if (arm_abi == ARM_ABI_LAST)
8440 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8441 arm_abi_string);
8442
8443 arm_update_current_architecture ();
8444 }
8445
/* "show arm abi" handler: print the selected ABI; when the setting is
   "auto" on an ARM target, also show the ABI actually in effect.  */

static void
arm_show_abi (struct ui_file *file, int from_tty,
	      struct cmd_list_element *c, const char *value)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());

  if (arm_abi_global == ARM_ABI_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM ABI is \"auto\" (currently \"%s\").\n"),
		      arm_abi_strings[tdep->arm_abi]);
  else
    fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
		      arm_abi_string);
}
8461
/* "show arm fallback-mode" handler: print the execution mode assumed
   when symbols are unavailable.  */

static void
arm_show_fallback_mode (struct ui_file *file, int from_tty,
			struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(when symbols are unavailable) is \"%s\".\n"),
		    arm_fallback_mode_string);
}
8471
/* "show arm force-mode" handler: print the execution mode assumed
   even when symbols are available.  */

static void
arm_show_force_mode (struct ui_file *file, int from_tty,
		     struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(even when symbols are available) is \"%s\".\n"),
		    arm_force_mode_string);
}
8481
/* If the user changes the register disassembly style used for info
   register and other commands, we have to also switch the style used
   in opcodes for disassembly output.  This function is run in the "set
   arm disassembly" command, and does that.  */

static void
set_disassembly_style_sfunc (const char *args, int from_tty,
			     struct cmd_list_element *c)
{
  /* Convert the short style name into the long style name (eg, reg-names-*)
     before calling the generic set_disassembler_options() function.  */
  std::string long_name = std::string ("reg-names-") + disassembly_style;
  /* &long_name[0] yields a mutable char* as expected by
     set_disassembler_options.  */
  set_disassembler_options (&long_name[0]);
}
8496
/* "show arm disassembler" handler: recover the short style name from
   the current disassembler options (the value after a "reg-names-"
   prefix) and print it.  If no reg-names-* option is present, an
   empty style is printed.  */

static void
show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
			      struct cmd_list_element *c, const char *value)
{
  struct gdbarch *gdbarch = get_current_arch ();
  char *options = get_disassembler_options (gdbarch);
  const char *style = "";
  int len = 0;
  const char *opt;

  /* Scan all comma-separated options; the last reg-names-* entry
     wins.  */
  FOR_EACH_DISASSEMBLER_OPTION (opt, options)
    if (CONST_STRNEQ (opt, "reg-names-"))
      {
	style = &opt[strlen ("reg-names-")];
	/* Trim the style at the next option separator.  */
	len = strcspn (style, ",");
      }

  fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
}
8516 \f
/* Return the ARM register name corresponding to register I.  Pseudo
   registers (VFP s0-s31, NEON q0-q15) are numbered just above the raw
   registers; anything beyond the static name table is only available
   through an XML target description and gets an empty name here.  */
static const char *
arm_register_name (struct gdbarch *gdbarch, int i)
{
  const int num_regs = gdbarch_num_regs (gdbarch);

  /* VFP single-precision pseudo registers occupy the first 32 pseudo
     slots.  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && i >= num_regs && i < num_regs + 32)
    {
      static const char *const vfp_pseudo_names[] = {
	"s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
	"s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
	"s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
	"s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
      };

      return vfp_pseudo_names[i - num_regs];
    }

  /* NEON quad pseudo registers follow the 32 single-precision
     pseudos.  */
  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && i >= num_regs + 32 && i < num_regs + 32 + 16)
    {
      static const char *const neon_pseudo_names[] = {
	"q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
	"q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
      };

      return neon_pseudo_names[i - num_regs - 32];
    }

  if (i >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return "";

  return arm_register_names[i];
}
8554
8555 /* Test whether the coff symbol specific value corresponds to a Thumb
8556 function. */
8557
8558 static int
8559 coff_sym_is_thumb (int val)
8560 {
8561 return (val == C_THUMBEXT
8562 || val == C_THUMBSTAT
8563 || val == C_THUMBEXTFUNC
8564 || val == C_THUMBSTATFUNC
8565 || val == C_THUMBLABEL);
8566 }
8567
/* arm_coff_make_msymbol_special()
   arm_elf_make_msymbol_special()

   These functions test whether the COFF or ELF symbol corresponds to
   an address in thumb code, and set a "special" bit in a minimal
   symbol to indicate that it does.  */

static void
arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
{
  elf_symbol_type *elfsym = (elf_symbol_type *) sym;

  /* The ELF symbol's target-internal data records the branch type;
     ST_BRANCH_TO_THUMB identifies a Thumb-mode symbol.  */
  if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
      == ST_BRANCH_TO_THUMB)
    MSYMBOL_SET_SPECIAL (msym);
}
8584
/* Mark MSYM "special" (Thumb) if the COFF symbol's storage class VAL
   is one of the Thumb classes.  See the comment above
   arm_elf_make_msymbol_special.  */

static void
arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
{
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
8591
/* Objfile-data destructor: release the per-section mapping symbol
   vectors.  The arm_per_objfile struct itself is obstack-allocated
   (see arm_record_special_symbol) and needs no explicit free.  */

static void
arm_objfile_data_free (struct objfile *objfile, void *arg)
{
  struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
  unsigned int i;

  for (i = 0; i < objfile->obfd->section_count; i++)
    VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
}
8601
/* Record an ARM mapping symbol ($a, $t or $d) for SYM in OBJFILE's
   per-section mapping tables.  The tables are kept sorted by symbol
   value so that later lookups can binary-search them.  */

static void
arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
			   asymbol *sym)
{
  const char *name = bfd_asymbol_name (sym);
  struct arm_per_objfile *data;
  VEC(arm_mapping_symbol_s) **map_p;
  struct arm_mapping_symbol new_map_sym;

  /* Only $a (ARM code), $t (Thumb code) and $d (data) symbols are of
     interest; ignore any other $-symbol.  */
  gdb_assert (name[0] == '$');
  if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
    return;

  /* Lazily create the per-objfile data the first time a mapping
     symbol is seen for this objfile.  */
  data = (struct arm_per_objfile *) objfile_data (objfile,
						  arm_objfile_data_key);
  if (data == NULL)
    {
      data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
			     struct arm_per_objfile);
      set_objfile_data (objfile, arm_objfile_data_key, data);
      data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
					   objfile->obfd->section_count,
					   VEC(arm_mapping_symbol_s) *);
    }
  map_p = &data->section_maps[bfd_get_section (sym)->index];

  new_map_sym.value = sym->value;
  new_map_sym.type = name[1];

  /* Assume that most mapping symbols appear in order of increasing
     value.  If they were randomly distributed, it would be faster to
     always push here and then sort at first use.  */
  if (!VEC_empty (arm_mapping_symbol_s, *map_p))
    {
      struct arm_mapping_symbol *prev_map_sym;

      prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
      if (prev_map_sym->value >= sym->value)
	{
	  /* Out-of-order symbol: insert at the position that keeps
	     the vector sorted.  */
	  unsigned int idx;
	  idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
				 arm_compare_mapping_symbols);
	  VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
	  return;
	}
    }

  /* Common case: append at the end.  */
  VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
}
8651
/* Write PC into REGCACHE and keep the Thumb (T) bit of the status
   register consistent with the mode of the destination address.  */

static void
arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
{
  struct gdbarch *gdbarch = regcache->arch ();
  regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);

  /* If necessary, set the T bit.  */
  if (arm_apcs_32)
    {
      ULONGEST val, t_bit;
      regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
      t_bit = arm_psr_thumb_bit (gdbarch);
      if (arm_pc_is_thumb (gdbarch, pc))
	regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
					val | t_bit);
      else
	regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
					val & ~t_bit);
    }
}
8672
/* Read the contents of a NEON quad register, by reading from two
   double registers.  This is used to implement the quad pseudo
   registers, and for argument passing in case the quad registers are
   missing; vectors are passed in quad registers when using the VFP
   ABI, even if a NEON unit is not present.  REGNUM is the index of
   the quad register, in [0, 15].  */

static enum register_status
arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
		    int regnum, gdb_byte *buf)
{
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;
  enum register_status status;

  /* Quad register qN maps to double registers d(2N) and d(2N+1).  */
  xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
  double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
					       strlen (name_buf));

  /* d0 is always the least significant half of q0.  */
  if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
    offset = 8;
  else
    offset = 0;

  status = regcache_raw_read (regcache, double_regnum, reg_buf);
  if (status != REG_VALID)
    return status;
  memcpy (buf + offset, reg_buf, 8);

  /* The second double register fills the other half of the buffer.  */
  offset = 8 - offset;
  status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
  if (status != REG_VALID)
    return status;
  memcpy (buf + offset, reg_buf, 8);

  return REG_VALID;
}
8712
/* Read pseudo register REGNUM (a VFP single-precision register s0-s31
   or a NEON quad register q0-q15) out of the underlying raw double
   registers.  Implements the gdbarch pseudo_register_read method.  */

static enum register_status
arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
		 int regnum, gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  /* Pseudo registers are numbered just above the raw registers;
     rebase REGNUM so 0..31 are s-regs and 32..47 are q-regs.  */
  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
  else
    {
      enum register_status status;

      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      /* Register sN lives in half of double register d(N/2).  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      status = regcache_raw_read (regcache, double_regnum, reg_buf);
      if (status == REG_VALID)
	memcpy (buf, reg_buf + offset, 4);
      return status;
    }
}
8751
/* Store the contents of BUF to a NEON quad register, by writing to
   two double registers.  This is used to implement the quad pseudo
   registers, and for argument passing in case the quad registers are
   missing; vectors are passed in quad registers when using the VFP
   ABI, even if a NEON unit is not present.  REGNUM is the index
   of the quad register, in [0, 15].  */

static void
arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
		     int regnum, const gdb_byte *buf)
{
  char name_buf[4];
  int offset, double_regnum;

  /* Quad register qN maps to double registers d(2N) and d(2N+1).  */
  xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
  double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
					       strlen (name_buf));

  /* d0 is always the least significant half of q0.  */
  if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
    offset = 8;
  else
    offset = 0;

  regcache_raw_write (regcache, double_regnum, buf + offset);
  /* The other half of BUF goes to the second double register.  */
  offset = 8 - offset;
  regcache_raw_write (regcache, double_regnum + 1, buf + offset);
}
8780
/* Write pseudo register REGNUM (a VFP single-precision register
   s0-s31 or a NEON quad register q0-q15) into the underlying raw
   double registers.  Implements the gdbarch pseudo_register_write
   method.  */

static void
arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
		  int regnum, const gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  /* Rebase REGNUM so 0..31 are s-regs and 32..47 are q-regs.  */
  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
  else
    {
      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      /* Read-modify-write of the containing double register.
	 NOTE(review): the status of regcache_raw_read is ignored here,
	 unlike in arm_pseudo_read — if the read fails the untouched
	 half of REG_BUF is undefined; worth confirming upstream.  */
      regcache_raw_read (regcache, double_regnum, reg_buf);
      memcpy (reg_buf + offset, buf, 4);
      regcache_raw_write (regcache, double_regnum, reg_buf);
    }
}
8816
/* Value-getter for ARM user registers: BATON points to the register
   number recorded when the user register was registered.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  const int *reg_p = (const int *) baton;
  return value_of_register (*reg_p, frame);
}
8823 \f
/* OS/ABI sniffer for ARM ELF binaries.  Returns the detected OS/ABI,
   or GDB_OSABI_UNKNOWN to let the generic ELF sniffer decide.  */

static enum gdb_osabi
arm_elf_osabi_sniffer (bfd *abfd)
{
  unsigned int elfosabi;
  enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;

  elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];

  if (elfosabi == ELFOSABI_ARM)
    /* GNU tools use this value.  Check note sections in this case,
       as well.  */
    bfd_map_over_sections (abfd,
			   generic_elf_osabi_sniff_abi_tag_sections,
			   &osabi);

  /* Anything else will be handled by the generic ELF sniffer.  */
  return osabi;
}
8842
8843 static int
8844 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8845 struct reggroup *group)
8846 {
8847 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8848 this, FPS register belongs to save_regroup, restore_reggroup, and
8849 all_reggroup, of course. */
8850 if (regnum == ARM_FPS_REGNUM)
8851 return (group == float_reggroup
8852 || group == save_reggroup
8853 || group == restore_reggroup
8854 || group == all_reggroup);
8855 else
8856 return default_register_reggroup_p (gdbarch, regnum, group);
8857 }
8858
8859 \f
/* For backward-compatibility we allow two 'g' packet lengths with
   the remote protocol depending on whether FPA registers are
   supplied.  M-profile targets do not have FPA registers, but some
   stubs already exist in the wild which use a 'g' packet which
   supplies them albeit with dummy values.  The packet format which
   includes FPA registers should be considered deprecated for
   M-profile targets.  */

static void
arm_register_g_packet_guesses (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    {
      /* If we know from the executable this is an M-profile target,
	 cater for remote targets whose register set layout is the
	 same as the FPA layout.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + (8 * FP_REGISTER_SIZE)
				      + (2 * INT_REGISTER_SIZE),
				      tdesc_arm_with_m_fpa_layout);

      /* The regular M-profile layout.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + INT_REGISTER_SIZE,
				      tdesc_arm_with_m);

      /* M-profile plus M4F VFP.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + (16 * VFP_REGISTER_SIZE)
				      + (2 * INT_REGISTER_SIZE),
				      tdesc_arm_with_m_vfp_d16);
    }

  /* Otherwise we don't have a useful guess.  */
}
8901
8902 /* Implement the code_of_frame_writable gdbarch method. */
8903
8904 static int
8905 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8906 {
8907 if (gdbarch_tdep (gdbarch)->is_m
8908 && get_frame_type (frame) == SIGTRAMP_FRAME)
8909 {
8910 /* M-profile exception frames return to some magic PCs, where
8911 isn't writable at all. */
8912 return 0;
8913 }
8914 else
8915 return 1;
8916 }
8917
8918 \f
8919 /* Initialize the current architecture based on INFO. If possible,
8920 re-use an architecture from ARCHES, which is a list of
8921 architectures already created during this debugging session.
8922
8923 Called e.g. at program startup, when reading a core file, and when
8924 reading a binary file. */
8925
8926 static struct gdbarch *
8927 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8928 {
8929 struct gdbarch_tdep *tdep;
8930 struct gdbarch *gdbarch;
8931 struct gdbarch_list *best_arch;
8932 enum arm_abi_kind arm_abi = arm_abi_global;
8933 enum arm_float_model fp_model = arm_fp_model;
8934 struct tdesc_arch_data *tdesc_data = NULL;
8935 int i, is_m = 0;
8936 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8937 int have_wmmx_registers = 0;
8938 int have_neon = 0;
8939 int have_fpa_registers = 1;
8940 const struct target_desc *tdesc = info.target_desc;
8941
8942 /* If we have an object to base this architecture on, try to determine
8943 its ABI. */
8944
8945 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8946 {
8947 int ei_osabi, e_flags;
8948
8949 switch (bfd_get_flavour (info.abfd))
8950 {
8951 case bfd_target_coff_flavour:
8952 /* Assume it's an old APCS-style ABI. */
8953 /* XXX WinCE? */
8954 arm_abi = ARM_ABI_APCS;
8955 break;
8956
8957 case bfd_target_elf_flavour:
8958 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8959 e_flags = elf_elfheader (info.abfd)->e_flags;
8960
8961 if (ei_osabi == ELFOSABI_ARM)
8962 {
8963 /* GNU tools used to use this value, but do not for EABI
8964 objects. There's nowhere to tag an EABI version
8965 anyway, so assume APCS. */
8966 arm_abi = ARM_ABI_APCS;
8967 }
8968 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8969 {
8970 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8971 int attr_arch, attr_profile;
8972
8973 switch (eabi_ver)
8974 {
8975 case EF_ARM_EABI_UNKNOWN:
8976 /* Assume GNU tools. */
8977 arm_abi = ARM_ABI_APCS;
8978 break;
8979
8980 case EF_ARM_EABI_VER4:
8981 case EF_ARM_EABI_VER5:
8982 arm_abi = ARM_ABI_AAPCS;
8983 /* EABI binaries default to VFP float ordering.
8984 They may also contain build attributes that can
8985 be used to identify if the VFP argument-passing
8986 ABI is in use. */
8987 if (fp_model == ARM_FLOAT_AUTO)
8988 {
8989 #ifdef HAVE_ELF
8990 switch (bfd_elf_get_obj_attr_int (info.abfd,
8991 OBJ_ATTR_PROC,
8992 Tag_ABI_VFP_args))
8993 {
8994 case AEABI_VFP_args_base:
8995 /* "The user intended FP parameter/result
8996 passing to conform to AAPCS, base
8997 variant". */
8998 fp_model = ARM_FLOAT_SOFT_VFP;
8999 break;
9000 case AEABI_VFP_args_vfp:
9001 /* "The user intended FP parameter/result
9002 passing to conform to AAPCS, VFP
9003 variant". */
9004 fp_model = ARM_FLOAT_VFP;
9005 break;
9006 case AEABI_VFP_args_toolchain:
9007 /* "The user intended FP parameter/result
9008 passing to conform to tool chain-specific
9009 conventions" - we don't know any such
9010 conventions, so leave it as "auto". */
9011 break;
9012 case AEABI_VFP_args_compatible:
9013 /* "Code is compatible with both the base
9014 and VFP variants; the user did not permit
9015 non-variadic functions to pass FP
9016 parameters/results" - leave it as
9017 "auto". */
9018 break;
9019 default:
9020 /* Attribute value not mentioned in the
9021 November 2012 ABI, so leave it as
9022 "auto". */
9023 break;
9024 }
9025 #else
9026 fp_model = ARM_FLOAT_SOFT_VFP;
9027 #endif
9028 }
9029 break;
9030
9031 default:
9032 /* Leave it as "auto". */
9033 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9034 break;
9035 }
9036
9037 #ifdef HAVE_ELF
9038 /* Detect M-profile programs. This only works if the
9039 executable file includes build attributes; GCC does
9040 copy them to the executable, but e.g. RealView does
9041 not. */
9042 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9043 Tag_CPU_arch);
9044 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9045 OBJ_ATTR_PROC,
9046 Tag_CPU_arch_profile);
9047 /* GCC specifies the profile for v6-M; RealView only
9048 specifies the profile for architectures starting with
9049 V7 (as opposed to architectures with a tag
9050 numerically greater than TAG_CPU_ARCH_V7). */
9051 if (!tdesc_has_registers (tdesc)
9052 && (attr_arch == TAG_CPU_ARCH_V6_M
9053 || attr_arch == TAG_CPU_ARCH_V6S_M
9054 || attr_profile == 'M'))
9055 is_m = 1;
9056 #endif
9057 }
9058
9059 if (fp_model == ARM_FLOAT_AUTO)
9060 {
9061 int e_flags = elf_elfheader (info.abfd)->e_flags;
9062
9063 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9064 {
9065 case 0:
9066 /* Leave it as "auto". Strictly speaking this case
9067 means FPA, but almost nobody uses that now, and
9068 many toolchains fail to set the appropriate bits
9069 for the floating-point model they use. */
9070 break;
9071 case EF_ARM_SOFT_FLOAT:
9072 fp_model = ARM_FLOAT_SOFT_FPA;
9073 break;
9074 case EF_ARM_VFP_FLOAT:
9075 fp_model = ARM_FLOAT_VFP;
9076 break;
9077 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9078 fp_model = ARM_FLOAT_SOFT_VFP;
9079 break;
9080 }
9081 }
9082
9083 if (e_flags & EF_ARM_BE8)
9084 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9085
9086 break;
9087
9088 default:
9089 /* Leave it as "auto". */
9090 break;
9091 }
9092 }
9093
9094 /* Check any target description for validity. */
9095 if (tdesc_has_registers (tdesc))
9096 {
9097 /* For most registers we require GDB's default names; but also allow
9098 the numeric names for sp / lr / pc, as a convenience. */
9099 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9100 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9101 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9102
9103 const struct tdesc_feature *feature;
9104 int valid_p;
9105
9106 feature = tdesc_find_feature (tdesc,
9107 "org.gnu.gdb.arm.core");
9108 if (feature == NULL)
9109 {
9110 feature = tdesc_find_feature (tdesc,
9111 "org.gnu.gdb.arm.m-profile");
9112 if (feature == NULL)
9113 return NULL;
9114 else
9115 is_m = 1;
9116 }
9117
9118 tdesc_data = tdesc_data_alloc ();
9119
9120 valid_p = 1;
9121 for (i = 0; i < ARM_SP_REGNUM; i++)
9122 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9123 arm_register_names[i]);
9124 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9125 ARM_SP_REGNUM,
9126 arm_sp_names);
9127 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9128 ARM_LR_REGNUM,
9129 arm_lr_names);
9130 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9131 ARM_PC_REGNUM,
9132 arm_pc_names);
9133 if (is_m)
9134 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9135 ARM_PS_REGNUM, "xpsr");
9136 else
9137 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9138 ARM_PS_REGNUM, "cpsr");
9139
9140 if (!valid_p)
9141 {
9142 tdesc_data_cleanup (tdesc_data);
9143 return NULL;
9144 }
9145
9146 feature = tdesc_find_feature (tdesc,
9147 "org.gnu.gdb.arm.fpa");
9148 if (feature != NULL)
9149 {
9150 valid_p = 1;
9151 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9152 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9153 arm_register_names[i]);
9154 if (!valid_p)
9155 {
9156 tdesc_data_cleanup (tdesc_data);
9157 return NULL;
9158 }
9159 }
9160 else
9161 have_fpa_registers = 0;
9162
9163 feature = tdesc_find_feature (tdesc,
9164 "org.gnu.gdb.xscale.iwmmxt");
9165 if (feature != NULL)
9166 {
9167 static const char *const iwmmxt_names[] = {
9168 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9169 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9170 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9171 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9172 };
9173
9174 valid_p = 1;
9175 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9176 valid_p
9177 &= tdesc_numbered_register (feature, tdesc_data, i,
9178 iwmmxt_names[i - ARM_WR0_REGNUM]);
9179
9180 /* Check for the control registers, but do not fail if they
9181 are missing. */
9182 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9183 tdesc_numbered_register (feature, tdesc_data, i,
9184 iwmmxt_names[i - ARM_WR0_REGNUM]);
9185
9186 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9187 valid_p
9188 &= tdesc_numbered_register (feature, tdesc_data, i,
9189 iwmmxt_names[i - ARM_WR0_REGNUM]);
9190
9191 if (!valid_p)
9192 {
9193 tdesc_data_cleanup (tdesc_data);
9194 return NULL;
9195 }
9196
9197 have_wmmx_registers = 1;
9198 }
9199
9200 /* If we have a VFP unit, check whether the single precision registers
9201 are present. If not, then we will synthesize them as pseudo
9202 registers. */
9203 feature = tdesc_find_feature (tdesc,
9204 "org.gnu.gdb.arm.vfp");
9205 if (feature != NULL)
9206 {
9207 static const char *const vfp_double_names[] = {
9208 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9209 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9210 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9211 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9212 };
9213
9214 /* Require the double precision registers. There must be either
9215 16 or 32. */
9216 valid_p = 1;
9217 for (i = 0; i < 32; i++)
9218 {
9219 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9220 ARM_D0_REGNUM + i,
9221 vfp_double_names[i]);
9222 if (!valid_p)
9223 break;
9224 }
9225 if (!valid_p && i == 16)
9226 valid_p = 1;
9227
9228 /* Also require FPSCR. */
9229 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9230 ARM_FPSCR_REGNUM, "fpscr");
9231 if (!valid_p)
9232 {
9233 tdesc_data_cleanup (tdesc_data);
9234 return NULL;
9235 }
9236
9237 if (tdesc_unnumbered_register (feature, "s0") == 0)
9238 have_vfp_pseudos = 1;
9239
9240 vfp_register_count = i;
9241
9242 /* If we have VFP, also check for NEON. The architecture allows
9243 NEON without VFP (integer vector operations only), but GDB
9244 does not support that. */
9245 feature = tdesc_find_feature (tdesc,
9246 "org.gnu.gdb.arm.neon");
9247 if (feature != NULL)
9248 {
9249 /* NEON requires 32 double-precision registers. */
9250 if (i != 32)
9251 {
9252 tdesc_data_cleanup (tdesc_data);
9253 return NULL;
9254 }
9255
9256 /* If there are quad registers defined by the stub, use
9257 their type; otherwise (normally) provide them with
9258 the default type. */
9259 if (tdesc_unnumbered_register (feature, "q0") == 0)
9260 have_neon_pseudos = 1;
9261
9262 have_neon = 1;
9263 }
9264 }
9265 }
9266
9267 /* If there is already a candidate, use it. */
9268 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9269 best_arch != NULL;
9270 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9271 {
9272 if (arm_abi != ARM_ABI_AUTO
9273 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9274 continue;
9275
9276 if (fp_model != ARM_FLOAT_AUTO
9277 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9278 continue;
9279
9280 /* There are various other properties in tdep that we do not
9281 need to check here: those derived from a target description,
9282 since gdbarches with a different target description are
9283 automatically disqualified. */
9284
9285 /* Do check is_m, though, since it might come from the binary. */
9286 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9287 continue;
9288
9289 /* Found a match. */
9290 break;
9291 }
9292
9293 if (best_arch != NULL)
9294 {
9295 if (tdesc_data != NULL)
9296 tdesc_data_cleanup (tdesc_data);
9297 return best_arch->gdbarch;
9298 }
9299
9300 tdep = XCNEW (struct gdbarch_tdep);
9301 gdbarch = gdbarch_alloc (&info, tdep);
9302
9303 /* Record additional information about the architecture we are defining.
9304 These are gdbarch discriminators, like the OSABI. */
9305 tdep->arm_abi = arm_abi;
9306 tdep->fp_model = fp_model;
9307 tdep->is_m = is_m;
9308 tdep->have_fpa_registers = have_fpa_registers;
9309 tdep->have_wmmx_registers = have_wmmx_registers;
9310 gdb_assert (vfp_register_count == 0
9311 || vfp_register_count == 16
9312 || vfp_register_count == 32);
9313 tdep->vfp_register_count = vfp_register_count;
9314 tdep->have_vfp_pseudos = have_vfp_pseudos;
9315 tdep->have_neon_pseudos = have_neon_pseudos;
9316 tdep->have_neon = have_neon;
9317
9318 arm_register_g_packet_guesses (gdbarch);
9319
9320 /* Breakpoints. */
9321 switch (info.byte_order_for_code)
9322 {
9323 case BFD_ENDIAN_BIG:
9324 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9325 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9326 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9327 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9328
9329 break;
9330
9331 case BFD_ENDIAN_LITTLE:
9332 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9333 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9334 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9335 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9336
9337 break;
9338
9339 default:
9340 internal_error (__FILE__, __LINE__,
9341 _("arm_gdbarch_init: bad byte order for float format"));
9342 }
9343
9344 /* On ARM targets char defaults to unsigned. */
9345 set_gdbarch_char_signed (gdbarch, 0);
9346
9347 /* wchar_t is unsigned under the AAPCS. */
9348 if (tdep->arm_abi == ARM_ABI_AAPCS)
9349 set_gdbarch_wchar_signed (gdbarch, 0);
9350 else
9351 set_gdbarch_wchar_signed (gdbarch, 1);
9352
9353 /* Note: for displaced stepping, this includes the breakpoint, and one word
9354 of additional scratch space. This setting isn't used for anything beside
9355 displaced stepping at present. */
9356 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9357
9358 /* This should be low enough for everything. */
9359 tdep->lowest_pc = 0x20;
9360 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9361
9362 /* The default, for both APCS and AAPCS, is to return small
9363 structures in registers. */
9364 tdep->struct_return = reg_struct_return;
9365
9366 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9367 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9368
9369 if (is_m)
9370 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9371
9372 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9373
9374 /* Frame handling. */
9375 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9376 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9377 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9378
9379 frame_base_set_default (gdbarch, &arm_normal_base);
9380
9381 /* Address manipulation. */
9382 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9383
9384 /* Advance PC across function entry code. */
9385 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9386
9387 /* Detect whether PC is at a point where the stack has been destroyed. */
9388 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9389
9390 /* Skip trampolines. */
9391 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9392
9393 /* The stack grows downward. */
9394 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9395
9396 /* Breakpoint manipulation. */
9397 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9398 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9399 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9400 arm_breakpoint_kind_from_current_state);
9401
9402 /* Information about registers, etc. */
9403 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9404 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9405 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9406 set_gdbarch_register_type (gdbarch, arm_register_type);
9407 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9408
9409 /* This "info float" is FPA-specific. Use the generic version if we
9410 do not have FPA. */
9411 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9412 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9413
9414 /* Internal <-> external register number maps. */
9415 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9416 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9417
9418 set_gdbarch_register_name (gdbarch, arm_register_name);
9419
9420 /* Returning results. */
9421 set_gdbarch_return_value (gdbarch, arm_return_value);
9422
9423 /* Disassembly. */
9424 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9425
9426 /* Minsymbol frobbing. */
9427 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9428 set_gdbarch_coff_make_msymbol_special (gdbarch,
9429 arm_coff_make_msymbol_special);
9430 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9431
9432 /* Thumb-2 IT block support. */
9433 set_gdbarch_adjust_breakpoint_address (gdbarch,
9434 arm_adjust_breakpoint_address);
9435
9436 /* Virtual tables. */
9437 set_gdbarch_vbit_in_delta (gdbarch, 1);
9438
9439 /* Hook in the ABI-specific overrides, if they have been registered. */
9440 gdbarch_init_osabi (info, gdbarch);
9441
9442 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9443
9444 /* Add some default predicates. */
9445 if (is_m)
9446 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9447 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9448 dwarf2_append_unwinders (gdbarch);
9449 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9450 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9451 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9452
9453 /* Now we have tuned the configuration, set a few final things,
9454 based on what the OS ABI has told us. */
9455
9456 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9457 binaries are always marked. */
9458 if (tdep->arm_abi == ARM_ABI_AUTO)
9459 tdep->arm_abi = ARM_ABI_APCS;
9460
9461 /* Watchpoints are not steppable. */
9462 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9463
9464 /* We used to default to FPA for generic ARM, but almost nobody
9465 uses that now, and we now provide a way for the user to force
9466 the model. So default to the most useful variant. */
9467 if (tdep->fp_model == ARM_FLOAT_AUTO)
9468 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9469
9470 if (tdep->jb_pc >= 0)
9471 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9472
9473 /* Floating point sizes and format. */
9474 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9475 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9476 {
9477 set_gdbarch_double_format
9478 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9479 set_gdbarch_long_double_format
9480 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9481 }
9482 else
9483 {
9484 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9485 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9486 }
9487
9488 if (have_vfp_pseudos)
9489 {
9490 /* NOTE: These are the only pseudo registers used by
9491 the ARM target at the moment. If more are added, a
9492 little more care in numbering will be needed. */
9493
9494 int num_pseudos = 32;
9495 if (have_neon_pseudos)
9496 num_pseudos += 16;
9497 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9498 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9499 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9500 }
9501
9502 if (tdesc_data)
9503 {
9504 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9505
9506 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9507
9508 /* Override tdesc_register_type to adjust the types of VFP
9509 registers for NEON. */
9510 set_gdbarch_register_type (gdbarch, arm_register_type);
9511 }
9512
9513 /* Add standard register aliases. We add aliases even for those
9514 names which are used by the current architecture - it's simpler,
9515 and does no harm, since nothing ever lists user registers. */
9516 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9517 user_reg_add (gdbarch, arm_register_aliases[i].name,
9518 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9519
9520 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9521 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9522
9523 return gdbarch;
9524 }
9525
9526 static void
9527 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9528 {
9529 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9530
9531 if (tdep == NULL)
9532 return;
9533
9534 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9535 (unsigned long) tdep->lowest_pc);
9536 }
9537
#if GDB_SELF_TEST
namespace selftests
{
/* Forward declaration of the record-mode unit test; it is registered
   with the self-test framework in _initialize_arm_tdep below.  */
static void arm_record_test (void);
}
#endif
9544
9545 void
9546 _initialize_arm_tdep (void)
9547 {
9548 long length;
9549 const char *setname;
9550 const char *setdesc;
9551 int i, j;
9552 char regdesc[1024], *rdptr = regdesc;
9553 size_t rest = sizeof (regdesc);
9554
9555 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9556
9557 arm_objfile_data_key
9558 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9559
9560 /* Add ourselves to objfile event chain. */
9561 observer_attach_new_objfile (arm_exidx_new_objfile);
9562 arm_exidx_data_key
9563 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9564
9565 /* Register an ELF OS ABI sniffer for ARM binaries. */
9566 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9567 bfd_target_elf_flavour,
9568 arm_elf_osabi_sniffer);
9569
9570 /* Initialize the standard target descriptions. */
9571 initialize_tdesc_arm_with_m ();
9572 initialize_tdesc_arm_with_m_fpa_layout ();
9573 initialize_tdesc_arm_with_m_vfp_d16 ();
9574 initialize_tdesc_arm_with_iwmmxt ();
9575 initialize_tdesc_arm_with_vfpv2 ();
9576 initialize_tdesc_arm_with_vfpv3 ();
9577 initialize_tdesc_arm_with_neon ();
9578
9579 /* Add root prefix command for all "set arm"/"show arm" commands. */
9580 add_prefix_cmd ("arm", no_class, set_arm_command,
9581 _("Various ARM-specific commands."),
9582 &setarmcmdlist, "set arm ", 0, &setlist);
9583
9584 add_prefix_cmd ("arm", no_class, show_arm_command,
9585 _("Various ARM-specific commands."),
9586 &showarmcmdlist, "show arm ", 0, &showlist);
9587
9588
9589 arm_disassembler_options = xstrdup ("reg-names-std");
9590 const disasm_options_t *disasm_options = disassembler_options_arm ();
9591 int num_disassembly_styles = 0;
9592 for (i = 0; disasm_options->name[i] != NULL; i++)
9593 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9594 num_disassembly_styles++;
9595
9596 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9597 valid_disassembly_styles = XNEWVEC (const char *,
9598 num_disassembly_styles + 1);
9599 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9600 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9601 {
9602 size_t offset = strlen ("reg-names-");
9603 const char *style = disasm_options->name[i];
9604 valid_disassembly_styles[j++] = &style[offset];
9605 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9606 disasm_options->description[i]);
9607 rdptr += length;
9608 rest -= length;
9609 }
9610 /* Mark the end of valid options. */
9611 valid_disassembly_styles[num_disassembly_styles] = NULL;
9612
9613 /* Create the help text. */
9614 std::string helptext = string_printf ("%s%s%s",
9615 _("The valid values are:\n"),
9616 regdesc,
9617 _("The default is \"std\"."));
9618
9619 add_setshow_enum_cmd("disassembler", no_class,
9620 valid_disassembly_styles, &disassembly_style,
9621 _("Set the disassembly style."),
9622 _("Show the disassembly style."),
9623 helptext.c_str (),
9624 set_disassembly_style_sfunc,
9625 show_disassembly_style_sfunc,
9626 &setarmcmdlist, &showarmcmdlist);
9627
9628 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9629 _("Set usage of ARM 32-bit mode."),
9630 _("Show usage of ARM 32-bit mode."),
9631 _("When off, a 26-bit PC will be used."),
9632 NULL,
9633 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9634 mode is %s. */
9635 &setarmcmdlist, &showarmcmdlist);
9636
9637 /* Add a command to allow the user to force the FPU model. */
9638 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9639 _("Set the floating point type."),
9640 _("Show the floating point type."),
9641 _("auto - Determine the FP typefrom the OS-ABI.\n\
9642 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9643 fpa - FPA co-processor (GCC compiled).\n\
9644 softvfp - Software FP with pure-endian doubles.\n\
9645 vfp - VFP co-processor."),
9646 set_fp_model_sfunc, show_fp_model,
9647 &setarmcmdlist, &showarmcmdlist);
9648
9649 /* Add a command to allow the user to force the ABI. */
9650 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9651 _("Set the ABI."),
9652 _("Show the ABI."),
9653 NULL, arm_set_abi, arm_show_abi,
9654 &setarmcmdlist, &showarmcmdlist);
9655
9656 /* Add two commands to allow the user to force the assumed
9657 execution mode. */
9658 add_setshow_enum_cmd ("fallback-mode", class_support,
9659 arm_mode_strings, &arm_fallback_mode_string,
9660 _("Set the mode assumed when symbols are unavailable."),
9661 _("Show the mode assumed when symbols are unavailable."),
9662 NULL, NULL, arm_show_fallback_mode,
9663 &setarmcmdlist, &showarmcmdlist);
9664 add_setshow_enum_cmd ("force-mode", class_support,
9665 arm_mode_strings, &arm_force_mode_string,
9666 _("Set the mode assumed even when symbols are available."),
9667 _("Show the mode assumed even when symbols are available."),
9668 NULL, NULL, arm_show_force_mode,
9669 &setarmcmdlist, &showarmcmdlist);
9670
9671 /* Debugging flag. */
9672 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9673 _("Set ARM debugging."),
9674 _("Show ARM debugging."),
9675 _("When on, arm-specific debugging is enabled."),
9676 NULL,
9677 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9678 &setdebuglist, &showdebuglist);
9679
9680 #if GDB_SELF_TEST
9681 selftests::register_test ("arm-record", selftests::arm_record_test);
9682 #endif
9683
9684 }
9685
/* ARM-reversible process record data structures.  */

/* Instruction sizes, in bytes, for the three encodings the recorder
   handles: 32-bit ARM, 16-bit Thumb, and 32-bit Thumb-2.  */
#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20
9696
/* Copy LENGTH register numbers out of RECORD_BUF into a freshly
   allocated array assigned to REGS; REGS is left untouched when LENGTH
   is zero.  LENGTH is captured once in REG_LEN and only REG_LEN is used
   afterwards, so the macro no longer evaluates its LENGTH argument
   twice (the old expansion re-evaluated LENGTH inside the memcpy).  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy (&REGS[0], &RECORD_BUF[0], \
                        sizeof (uint32_t) * reg_len); \
              } \
          } \
        while (0)
9708
/* Copy LENGTH memory records (length/address pairs laid out as in
   struct arm_mem_r) out of RECORD_BUF into a freshly allocated array
   assigned to MEMS; MEMS is left untouched when LENGTH is zero.
   LENGTH is captured once in MEM_LEN and only MEM_LEN is used
   afterwards, so the macro no longer evaluates its LENGTH argument
   twice.  NOTE(review): the copy assumes struct arm_mem_r packs its
   two uint32_t members with no padding, as the original did.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
              { \
                MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
                memcpy (&MEMS->len, &RECORD_BUF[0], \
                        sizeof (struct arm_mem_r) * mem_len); \
              } \
          } \
        while (0)
9721
/* Checks whether insn is already recorded or yet to be decoded: nonzero
   once at least one register or memory record has been deposited for
   the current instruction (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9725
/* ARM memory record structure: one contiguous span of memory that an
   instruction is about to modify, so record mode can save its old
   contents.  */
struct arm_mem_r
{
  uint32_t len;    /* Record length (bytes).  */
  uint32_t addr;   /* Memory address.  */
};
9732
/* ARM instruction record contains opcode of current insn
   and execution state (before entry to decode_insn()),
   contains list of to-be-modified registers and
   memory blocks (on return from decode_insn()).  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;
  struct regcache *regcache;
  CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
  uint32_t arm_insn;            /* Should accommodate thumb.  */
  uint32_t cond;                /* Condition code.  */
  uint32_t opcode;              /* Insn opcode.  */
  uint32_t decode;              /* Insn decode bits.  */
  uint32_t mem_rec_count;       /* No of mem records.  */
  uint32_t reg_rec_count;       /* No of reg records.  */
  uint32_t *arm_regs;           /* Registers to be saved for this record.  */
  struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
} insn_decode_record;
9752
9753
9754 /* Checks ARM SBZ and SBO mandatory fields. */
9755
9756 static int
9757 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9758 {
9759 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9760
9761 if (!len)
9762 return 1;
9763
9764 if (!sbo)
9765 ones = ~ones;
9766
9767 while (ones)
9768 {
9769 if (!(ones & sbo))
9770 {
9771 return 0;
9772 }
9773 ones = ones >> 1;
9774 }
9775 return 1;
9776 }
9777
/* Result codes returned by the record handlers.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};
9783
/* Which miscellaneous store flavour arm_record_strx is handling:
   halfword (STRH) or doubleword (STRD).  */
typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;
9789
/* Instruction-set encoding of the insn being recorded.  */
typedef enum
{
  ARM_RECORD=1,
  THUMB_RECORD,
  THUMB2_RECORD
} record_type_t;
9796
9797
9798 static int
9799 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9800 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9801 {
9802
9803 struct regcache *reg_cache = arm_insn_r->regcache;
9804 ULONGEST u_regval[2]= {0};
9805
9806 uint32_t reg_src1 = 0, reg_src2 = 0;
9807 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9808
9809 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9810 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9811
9812 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9813 {
9814 /* 1) Handle misc store, immediate offset. */
9815 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9816 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9817 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9818 regcache_raw_read_unsigned (reg_cache, reg_src1,
9819 &u_regval[0]);
9820 if (ARM_PC_REGNUM == reg_src1)
9821 {
9822 /* If R15 was used as Rn, hence current PC+8. */
9823 u_regval[0] = u_regval[0] + 8;
9824 }
9825 offset_8 = (immed_high << 4) | immed_low;
9826 /* Calculate target store address. */
9827 if (14 == arm_insn_r->opcode)
9828 {
9829 tgt_mem_addr = u_regval[0] + offset_8;
9830 }
9831 else
9832 {
9833 tgt_mem_addr = u_regval[0] - offset_8;
9834 }
9835 if (ARM_RECORD_STRH == str_type)
9836 {
9837 record_buf_mem[0] = 2;
9838 record_buf_mem[1] = tgt_mem_addr;
9839 arm_insn_r->mem_rec_count = 1;
9840 }
9841 else if (ARM_RECORD_STRD == str_type)
9842 {
9843 record_buf_mem[0] = 4;
9844 record_buf_mem[1] = tgt_mem_addr;
9845 record_buf_mem[2] = 4;
9846 record_buf_mem[3] = tgt_mem_addr + 4;
9847 arm_insn_r->mem_rec_count = 2;
9848 }
9849 }
9850 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9851 {
9852 /* 2) Store, register offset. */
9853 /* Get Rm. */
9854 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9855 /* Get Rn. */
9856 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9857 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9858 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9859 if (15 == reg_src2)
9860 {
9861 /* If R15 was used as Rn, hence current PC+8. */
9862 u_regval[0] = u_regval[0] + 8;
9863 }
9864 /* Calculate target store address, Rn +/- Rm, register offset. */
9865 if (12 == arm_insn_r->opcode)
9866 {
9867 tgt_mem_addr = u_regval[0] + u_regval[1];
9868 }
9869 else
9870 {
9871 tgt_mem_addr = u_regval[1] - u_regval[0];
9872 }
9873 if (ARM_RECORD_STRH == str_type)
9874 {
9875 record_buf_mem[0] = 2;
9876 record_buf_mem[1] = tgt_mem_addr;
9877 arm_insn_r->mem_rec_count = 1;
9878 }
9879 else if (ARM_RECORD_STRD == str_type)
9880 {
9881 record_buf_mem[0] = 4;
9882 record_buf_mem[1] = tgt_mem_addr;
9883 record_buf_mem[2] = 4;
9884 record_buf_mem[3] = tgt_mem_addr + 4;
9885 arm_insn_r->mem_rec_count = 2;
9886 }
9887 }
9888 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9889 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9890 {
9891 /* 3) Store, immediate pre-indexed. */
9892 /* 5) Store, immediate post-indexed. */
9893 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9894 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9895 offset_8 = (immed_high << 4) | immed_low;
9896 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9897 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9898 /* Calculate target store address, Rn +/- Rm, register offset. */
9899 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9900 {
9901 tgt_mem_addr = u_regval[0] + offset_8;
9902 }
9903 else
9904 {
9905 tgt_mem_addr = u_regval[0] - offset_8;
9906 }
9907 if (ARM_RECORD_STRH == str_type)
9908 {
9909 record_buf_mem[0] = 2;
9910 record_buf_mem[1] = tgt_mem_addr;
9911 arm_insn_r->mem_rec_count = 1;
9912 }
9913 else if (ARM_RECORD_STRD == str_type)
9914 {
9915 record_buf_mem[0] = 4;
9916 record_buf_mem[1] = tgt_mem_addr;
9917 record_buf_mem[2] = 4;
9918 record_buf_mem[3] = tgt_mem_addr + 4;
9919 arm_insn_r->mem_rec_count = 2;
9920 }
9921 /* Record Rn also as it changes. */
9922 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9923 arm_insn_r->reg_rec_count = 1;
9924 }
9925 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9926 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9927 {
9928 /* 4) Store, register pre-indexed. */
9929 /* 6) Store, register post -indexed. */
9930 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9931 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9932 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9933 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9934 /* Calculate target store address, Rn +/- Rm, register offset. */
9935 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9936 {
9937 tgt_mem_addr = u_regval[0] + u_regval[1];
9938 }
9939 else
9940 {
9941 tgt_mem_addr = u_regval[1] - u_regval[0];
9942 }
9943 if (ARM_RECORD_STRH == str_type)
9944 {
9945 record_buf_mem[0] = 2;
9946 record_buf_mem[1] = tgt_mem_addr;
9947 arm_insn_r->mem_rec_count = 1;
9948 }
9949 else if (ARM_RECORD_STRD == str_type)
9950 {
9951 record_buf_mem[0] = 4;
9952 record_buf_mem[1] = tgt_mem_addr;
9953 record_buf_mem[2] = 4;
9954 record_buf_mem[3] = tgt_mem_addr + 4;
9955 arm_insn_r->mem_rec_count = 2;
9956 }
9957 /* Record Rn also as it changes. */
9958 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9959 arm_insn_r->reg_rec_count = 1;
9960 }
9961 return 0;
9962 }
9963
9964 /* Handling ARM extension space insns. */
9965
9966 static int
9967 arm_record_extension_space (insn_decode_record *arm_insn_r)
9968 {
9969 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
9970 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9971 uint32_t record_buf[8], record_buf_mem[8];
9972 uint32_t reg_src1 = 0;
9973 struct regcache *reg_cache = arm_insn_r->regcache;
9974 ULONGEST u_regval = 0;
9975
9976 gdb_assert (!INSN_RECORDED(arm_insn_r));
9977 /* Handle unconditional insn extension space. */
9978
9979 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9980 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9981 if (arm_insn_r->cond)
9982 {
9983 /* PLD has no affect on architectural state, it just affects
9984 the caches. */
9985 if (5 == ((opcode1 & 0xE0) >> 5))
9986 {
9987 /* BLX(1) */
9988 record_buf[0] = ARM_PS_REGNUM;
9989 record_buf[1] = ARM_LR_REGNUM;
9990 arm_insn_r->reg_rec_count = 2;
9991 }
9992 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9993 }
9994
9995
9996 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9997 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9998 {
9999 ret = -1;
10000 /* Undefined instruction on ARM V5; need to handle if later
10001 versions define it. */
10002 }
10003
10004 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10005 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10006 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10007
10008 /* Handle arithmetic insn extension space. */
10009 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10010 && !INSN_RECORDED(arm_insn_r))
10011 {
10012 /* Handle MLA(S) and MUL(S). */
10013 if (in_inclusive_range (insn_op1, 0U, 3U))
10014 {
10015 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10016 record_buf[1] = ARM_PS_REGNUM;
10017 arm_insn_r->reg_rec_count = 2;
10018 }
10019 else if (in_inclusive_range (insn_op1, 4U, 15U))
10020 {
10021 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10022 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10023 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10024 record_buf[2] = ARM_PS_REGNUM;
10025 arm_insn_r->reg_rec_count = 3;
10026 }
10027 }
10028
10029 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10030 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10031 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10032
10033 /* Handle control insn extension space. */
10034
10035 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10036 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10037 {
10038 if (!bit (arm_insn_r->arm_insn,25))
10039 {
10040 if (!bits (arm_insn_r->arm_insn, 4, 7))
10041 {
10042 if ((0 == insn_op1) || (2 == insn_op1))
10043 {
10044 /* MRS. */
10045 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10046 arm_insn_r->reg_rec_count = 1;
10047 }
10048 else if (1 == insn_op1)
10049 {
10050 /* CSPR is going to be changed. */
10051 record_buf[0] = ARM_PS_REGNUM;
10052 arm_insn_r->reg_rec_count = 1;
10053 }
10054 else if (3 == insn_op1)
10055 {
10056 /* SPSR is going to be changed. */
10057 /* We need to get SPSR value, which is yet to be done. */
10058 return -1;
10059 }
10060 }
10061 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10062 {
10063 if (1 == insn_op1)
10064 {
10065 /* BX. */
10066 record_buf[0] = ARM_PS_REGNUM;
10067 arm_insn_r->reg_rec_count = 1;
10068 }
10069 else if (3 == insn_op1)
10070 {
10071 /* CLZ. */
10072 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10073 arm_insn_r->reg_rec_count = 1;
10074 }
10075 }
10076 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10077 {
10078 /* BLX. */
10079 record_buf[0] = ARM_PS_REGNUM;
10080 record_buf[1] = ARM_LR_REGNUM;
10081 arm_insn_r->reg_rec_count = 2;
10082 }
10083 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10084 {
10085 /* QADD, QSUB, QDADD, QDSUB */
10086 record_buf[0] = ARM_PS_REGNUM;
10087 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10088 arm_insn_r->reg_rec_count = 2;
10089 }
10090 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10091 {
10092 /* BKPT. */
10093 record_buf[0] = ARM_PS_REGNUM;
10094 record_buf[1] = ARM_LR_REGNUM;
10095 arm_insn_r->reg_rec_count = 2;
10096
10097 /* Save SPSR also;how? */
10098 return -1;
10099 }
10100 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10101 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10102 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10103 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10104 )
10105 {
10106 if (0 == insn_op1 || 1 == insn_op1)
10107 {
10108 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10109 /* We dont do optimization for SMULW<y> where we
10110 need only Rd. */
10111 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10112 record_buf[1] = ARM_PS_REGNUM;
10113 arm_insn_r->reg_rec_count = 2;
10114 }
10115 else if (2 == insn_op1)
10116 {
10117 /* SMLAL<x><y>. */
10118 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10119 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10120 arm_insn_r->reg_rec_count = 2;
10121 }
10122 else if (3 == insn_op1)
10123 {
10124 /* SMUL<x><y>. */
10125 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10126 arm_insn_r->reg_rec_count = 1;
10127 }
10128 }
10129 }
10130 else
10131 {
10132 /* MSR : immediate form. */
10133 if (1 == insn_op1)
10134 {
10135 /* CSPR is going to be changed. */
10136 record_buf[0] = ARM_PS_REGNUM;
10137 arm_insn_r->reg_rec_count = 1;
10138 }
10139 else if (3 == insn_op1)
10140 {
10141 /* SPSR is going to be changed. */
10142 /* we need to get SPSR value, which is yet to be done */
10143 return -1;
10144 }
10145 }
10146 }
10147
10148 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10149 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10150 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10151
10152 /* Handle load/store insn extension space. */
10153
10154 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10155 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10156 && !INSN_RECORDED(arm_insn_r))
10157 {
10158 /* SWP/SWPB. */
10159 if (0 == insn_op1)
10160 {
10161 /* These insn, changes register and memory as well. */
10162 /* SWP or SWPB insn. */
10163 /* Get memory address given by Rn. */
10164 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10165 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10166 /* SWP insn ?, swaps word. */
10167 if (8 == arm_insn_r->opcode)
10168 {
10169 record_buf_mem[0] = 4;
10170 }
10171 else
10172 {
10173 /* SWPB insn, swaps only byte. */
10174 record_buf_mem[0] = 1;
10175 }
10176 record_buf_mem[1] = u_regval;
10177 arm_insn_r->mem_rec_count = 1;
10178 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10179 arm_insn_r->reg_rec_count = 1;
10180 }
10181 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10182 {
10183 /* STRH. */
10184 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10185 ARM_RECORD_STRH);
10186 }
10187 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10188 {
10189 /* LDRD. */
10190 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10191 record_buf[1] = record_buf[0] + 1;
10192 arm_insn_r->reg_rec_count = 2;
10193 }
10194 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10195 {
10196 /* STRD. */
10197 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10198 ARM_RECORD_STRD);
10199 }
10200 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10201 {
10202 /* LDRH, LDRSB, LDRSH. */
10203 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10204 arm_insn_r->reg_rec_count = 1;
10205 }
10206
10207 }
10208
10209 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10210 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10211 && !INSN_RECORDED(arm_insn_r))
10212 {
10213 ret = -1;
10214 /* Handle coprocessor insn extension space. */
10215 }
10216
10217 /* To be done for ARMv5 and later; as of now we return -1. */
10218 if (-1 == ret)
10219 return ret;
10220
10221 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10222 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10223
10224 return ret;
10225 }
10226
/* Handle ARM mode instructions with opcode 000: data-processing and
   miscellaneous instructions (multiplies, MRS/MSR, SWP/SWPB, BX/BLX,
   BKPT, CLZ and the misc load encodings).  Record into ARM_INSN_R the
   registers and memory locations the instruction will modify so that
   reverse execution can restore them.  Return 0 on success, -1 when
   the instruction cannot be recorded (e.g. it writes SPSR, which we
   cannot yet read back).  */

static int
arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
{
  struct regcache *reg_cache = arm_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];
  ULONGEST u_regval[2] = {0};

  uint32_t reg_src1 = 0, reg_dest = 0;
  uint32_t opcode1 = 0;

  /* Cache the main decode fields: opcode is insn bits 21-24, decode is
     insn bits 4-7, opcode1 is insn bits 20-24.  */
  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
  opcode1 = bits (arm_insn_r->arm_insn, 20, 24);

  /* Data processing insn / multiply insn.  */
  if (9 == arm_insn_r->decode
      && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
      || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
    {
      /* Handle multiply instructions.  */
      /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL.  */
      if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
        {
          /* Handle MLA and MUL: Rd is in bits 16-19; the S bit may
             update the flags, so CPSR is saved too.  */
          record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
          record_buf[1] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 2;
        }
      else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
        {
          /* Handle SMLAL, SMULL, UMLAL, UMULL: the 64-bit result goes
             into RdHi (bits 16-19) and RdLo (bits 12-15).  */
          record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
          record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
          record_buf[2] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 3;
        }
    }
  else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
           && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
    {
      /* Handle misc load insns, as 20th bit (L = 1).  */
      /* LDR insn has a capability to do branching, if
         MOV LR, PC is preceded by LDR insn having Rn as R15
         in that case, it emulates branch and link insn, and hence we
         need to save CPSR and PC as well.  I am not sure this is right
         place; as opcode = 010 LDR insn make this happen, if R15 was
         used.  */
      reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
      if (15 != reg_dest)
        {
          record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
          arm_insn_r->reg_rec_count = 1;
        }
      else
        {
          /* Loading into PC acts as a branch; save CPSR as well.  */
          record_buf[0] = reg_dest;
          record_buf[1] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 2;
        }
    }
  else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
           && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
           && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
           && 2 == bits (arm_insn_r->arm_insn, 20, 21))
    {
      /* Handle MSR insn (register form).  */
      if (9 == arm_insn_r->opcode)
        {
          /* CPSR is going to be changed.  */
          record_buf[0] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 1;
        }
      else
        {
          /* SPSR is going to be changed.  */
          /* How to read SPSR value?  Not supported yet.  */
          return -1;
        }
    }
  else if (9 == arm_insn_r->decode
           && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
           && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* Handling SWP, SWPB.  */
      /* These insn, changes register and memory as well.  */
      /* SWP or SWPB insn.  */

      /* Get memory address given by Rn (bits 16-19).  */
      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      /* SWP insn ?, swaps word.  */
      if (8 == arm_insn_r->opcode)
        {
          record_buf_mem[0] = 4;
        }
      else
        {
          /* SWPB insn, swaps only byte.  */
          record_buf_mem[0] = 1;
        }
      record_buf_mem[1] = u_regval[0];
      arm_insn_r->mem_rec_count = 1;
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (3 == arm_insn_r->decode && 0x12 == opcode1
           && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
    {
      /* Handle BLX, branch and link/exchange.  */
      if (9 == arm_insn_r->opcode)
        {
          /* Branch is chosen by setting T bit of CPSR, bitp[0] of Rm,
             and R14 stores the return address.  */
          record_buf[0] = ARM_PS_REGNUM;
          record_buf[1] = ARM_LR_REGNUM;
          arm_insn_r->reg_rec_count = 2;
        }
    }
  else if (7 == arm_insn_r->decode && 0x12 == opcode1)
    {
      /* Handle enhanced software breakpoint insn, BKPT.  */
      /* CPSR is changed to be executed in ARM state, disabling normal
         interrupts, entering abort mode.  */
      /* According to high vector configuration PC is set.  */
      /* User hit breakpoint and type reverse, in
         that case, we need to go back with previous CPSR and
         Program Counter.  */
      record_buf[0] = ARM_PS_REGNUM;
      record_buf[1] = ARM_LR_REGNUM;
      arm_insn_r->reg_rec_count = 2;

      /* Save SPSR also; how?  Not supported yet.  */
      return -1;
    }
  else if (11 == arm_insn_r->decode
           && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* Handle enhanced store insns and DSP insns (e.g. LDRD).  */

      /* Handle str(x) insn.  */
      arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
                      ARM_RECORD_STRH);
    }
  else if (1 == arm_insn_r->decode && 0x12 == opcode1
           && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
    {
      /* Handle BX, branch and link/exchange.  */
      /* Branch is chosen by setting T bit of CPSR, bitp[0] of Rm.  */
      record_buf[0] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 1;
    }
  else if (1 == arm_insn_r->decode && 0x16 == opcode1
           && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
           && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
    {
      /* Count leading zeros: CLZ.  Only Rd (bits 12-15) changes.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
           && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
           && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
           && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
           )
    {
      /* Handle MRS insn: only Rd (bits 12-15) is written.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (arm_insn_r->opcode <= 15)
    {
      /* Normal data processing insns.  */
      /* Out of 11 shifter operands mode, all the insn modifies destination
         register, which is specified by 13-16 decode.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[1] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 2;
    }
  else
    {
      return -1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
10415
10416 /* Handling opcode 001 insns. */
10417
10418 static int
10419 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10420 {
10421 uint32_t record_buf[8], record_buf_mem[8];
10422
10423 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10424 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10425
10426 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10427 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10428 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10429 )
10430 {
10431 /* Handle MSR insn. */
10432 if (9 == arm_insn_r->opcode)
10433 {
10434 /* CSPR is going to be changed. */
10435 record_buf[0] = ARM_PS_REGNUM;
10436 arm_insn_r->reg_rec_count = 1;
10437 }
10438 else
10439 {
10440 /* SPSR is going to be changed. */
10441 }
10442 }
10443 else if (arm_insn_r->opcode <= 15)
10444 {
10445 /* Normal data processing insns. */
10446 /* Out of 11 shifter operands mode, all the insn modifies destination
10447 register, which is specified by 13-16 decode. */
10448 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10449 record_buf[1] = ARM_PS_REGNUM;
10450 arm_insn_r->reg_rec_count = 2;
10451 }
10452 else
10453 {
10454 return -1;
10455 }
10456
10457 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10458 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10459 return 0;
10460 }
10461
10462 static int
10463 arm_record_media (insn_decode_record *arm_insn_r)
10464 {
10465 uint32_t record_buf[8];
10466
10467 switch (bits (arm_insn_r->arm_insn, 22, 24))
10468 {
10469 case 0:
10470 /* Parallel addition and subtraction, signed */
10471 case 1:
10472 /* Parallel addition and subtraction, unsigned */
10473 case 2:
10474 case 3:
10475 /* Packing, unpacking, saturation and reversal */
10476 {
10477 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10478
10479 record_buf[arm_insn_r->reg_rec_count++] = rd;
10480 }
10481 break;
10482
10483 case 4:
10484 case 5:
10485 /* Signed multiplies */
10486 {
10487 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10488 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10489
10490 record_buf[arm_insn_r->reg_rec_count++] = rd;
10491 if (op1 == 0x0)
10492 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10493 else if (op1 == 0x4)
10494 record_buf[arm_insn_r->reg_rec_count++]
10495 = bits (arm_insn_r->arm_insn, 12, 15);
10496 }
10497 break;
10498
10499 case 6:
10500 {
10501 if (bit (arm_insn_r->arm_insn, 21)
10502 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10503 {
10504 /* SBFX */
10505 record_buf[arm_insn_r->reg_rec_count++]
10506 = bits (arm_insn_r->arm_insn, 12, 15);
10507 }
10508 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10509 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10510 {
10511 /* USAD8 and USADA8 */
10512 record_buf[arm_insn_r->reg_rec_count++]
10513 = bits (arm_insn_r->arm_insn, 16, 19);
10514 }
10515 }
10516 break;
10517
10518 case 7:
10519 {
10520 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10521 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10522 {
10523 /* Permanently UNDEFINED */
10524 return -1;
10525 }
10526 else
10527 {
10528 /* BFC, BFI and UBFX */
10529 record_buf[arm_insn_r->reg_rec_count++]
10530 = bits (arm_insn_r->arm_insn, 12, 15);
10531 }
10532 }
10533 break;
10534
10535 default:
10536 return -1;
10537 }
10538
10539 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10540
10541 return 0;
10542 }
10543
10544 /* Handle ARM mode instructions with opcode 010. */
10545
10546 static int
10547 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10548 {
10549 struct regcache *reg_cache = arm_insn_r->regcache;
10550
10551 uint32_t reg_base , reg_dest;
10552 uint32_t offset_12, tgt_mem_addr;
10553 uint32_t record_buf[8], record_buf_mem[8];
10554 unsigned char wback;
10555 ULONGEST u_regval;
10556
10557 /* Calculate wback. */
10558 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10559 || (bit (arm_insn_r->arm_insn, 21) == 1);
10560
10561 arm_insn_r->reg_rec_count = 0;
10562 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10563
10564 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10565 {
10566 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10567 and LDRT. */
10568
10569 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10570 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10571
10572 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10573 preceeds a LDR instruction having R15 as reg_base, it
10574 emulates a branch and link instruction, and hence we need to save
10575 CPSR and PC as well. */
10576 if (ARM_PC_REGNUM == reg_dest)
10577 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10578
10579 /* If wback is true, also save the base register, which is going to be
10580 written to. */
10581 if (wback)
10582 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10583 }
10584 else
10585 {
10586 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10587
10588 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10589 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10590
10591 /* Handle bit U. */
10592 if (bit (arm_insn_r->arm_insn, 23))
10593 {
10594 /* U == 1: Add the offset. */
10595 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10596 }
10597 else
10598 {
10599 /* U == 0: subtract the offset. */
10600 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10601 }
10602
10603 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10604 bytes. */
10605 if (bit (arm_insn_r->arm_insn, 22))
10606 {
10607 /* STRB and STRBT: 1 byte. */
10608 record_buf_mem[0] = 1;
10609 }
10610 else
10611 {
10612 /* STR and STRT: 4 bytes. */
10613 record_buf_mem[0] = 4;
10614 }
10615
10616 /* Handle bit P. */
10617 if (bit (arm_insn_r->arm_insn, 24))
10618 record_buf_mem[1] = tgt_mem_addr;
10619 else
10620 record_buf_mem[1] = (uint32_t) u_regval;
10621
10622 arm_insn_r->mem_rec_count = 1;
10623
10624 /* If wback is true, also save the base register, which is going to be
10625 written to. */
10626 if (wback)
10627 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10628 }
10629
10630 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10631 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10632 return 0;
10633 }
10634
10635 /* Handling opcode 011 insns. */
10636
10637 static int
10638 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10639 {
10640 struct regcache *reg_cache = arm_insn_r->regcache;
10641
10642 uint32_t shift_imm = 0;
10643 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10644 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10645 uint32_t record_buf[8], record_buf_mem[8];
10646
10647 LONGEST s_word;
10648 ULONGEST u_regval[2];
10649
10650 if (bit (arm_insn_r->arm_insn, 4))
10651 return arm_record_media (arm_insn_r);
10652
10653 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10654 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10655
10656 /* Handle enhanced store insns and LDRD DSP insn,
10657 order begins according to addressing modes for store insns
10658 STRH insn. */
10659
10660 /* LDR or STR? */
10661 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10662 {
10663 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10664 /* LDR insn has a capability to do branching, if
10665 MOV LR, PC is precedded by LDR insn having Rn as R15
10666 in that case, it emulates branch and link insn, and hence we
10667 need to save CSPR and PC as well. */
10668 if (15 != reg_dest)
10669 {
10670 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10671 arm_insn_r->reg_rec_count = 1;
10672 }
10673 else
10674 {
10675 record_buf[0] = reg_dest;
10676 record_buf[1] = ARM_PS_REGNUM;
10677 arm_insn_r->reg_rec_count = 2;
10678 }
10679 }
10680 else
10681 {
10682 if (! bits (arm_insn_r->arm_insn, 4, 11))
10683 {
10684 /* Store insn, register offset and register pre-indexed,
10685 register post-indexed. */
10686 /* Get Rm. */
10687 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10688 /* Get Rn. */
10689 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10690 regcache_raw_read_unsigned (reg_cache, reg_src1
10691 , &u_regval[0]);
10692 regcache_raw_read_unsigned (reg_cache, reg_src2
10693 , &u_regval[1]);
10694 if (15 == reg_src2)
10695 {
10696 /* If R15 was used as Rn, hence current PC+8. */
10697 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10698 u_regval[0] = u_regval[0] + 8;
10699 }
10700 /* Calculate target store address, Rn +/- Rm, register offset. */
10701 /* U == 1. */
10702 if (bit (arm_insn_r->arm_insn, 23))
10703 {
10704 tgt_mem_addr = u_regval[0] + u_regval[1];
10705 }
10706 else
10707 {
10708 tgt_mem_addr = u_regval[1] - u_regval[0];
10709 }
10710
10711 switch (arm_insn_r->opcode)
10712 {
10713 /* STR. */
10714 case 8:
10715 case 12:
10716 /* STR. */
10717 case 9:
10718 case 13:
10719 /* STRT. */
10720 case 1:
10721 case 5:
10722 /* STR. */
10723 case 0:
10724 case 4:
10725 record_buf_mem[0] = 4;
10726 break;
10727
10728 /* STRB. */
10729 case 10:
10730 case 14:
10731 /* STRB. */
10732 case 11:
10733 case 15:
10734 /* STRBT. */
10735 case 3:
10736 case 7:
10737 /* STRB. */
10738 case 2:
10739 case 6:
10740 record_buf_mem[0] = 1;
10741 break;
10742
10743 default:
10744 gdb_assert_not_reached ("no decoding pattern found");
10745 break;
10746 }
10747 record_buf_mem[1] = tgt_mem_addr;
10748 arm_insn_r->mem_rec_count = 1;
10749
10750 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10751 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10752 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10753 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10754 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10755 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10756 )
10757 {
10758 /* Rn is going to be changed in pre-indexed mode and
10759 post-indexed mode as well. */
10760 record_buf[0] = reg_src2;
10761 arm_insn_r->reg_rec_count = 1;
10762 }
10763 }
10764 else
10765 {
10766 /* Store insn, scaled register offset; scaled pre-indexed. */
10767 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10768 /* Get Rm. */
10769 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10770 /* Get Rn. */
10771 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10772 /* Get shift_imm. */
10773 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10774 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10775 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10776 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10777 /* Offset_12 used as shift. */
10778 switch (offset_12)
10779 {
10780 case 0:
10781 /* Offset_12 used as index. */
10782 offset_12 = u_regval[0] << shift_imm;
10783 break;
10784
10785 case 1:
10786 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10787 break;
10788
10789 case 2:
10790 if (!shift_imm)
10791 {
10792 if (bit (u_regval[0], 31))
10793 {
10794 offset_12 = 0xFFFFFFFF;
10795 }
10796 else
10797 {
10798 offset_12 = 0;
10799 }
10800 }
10801 else
10802 {
10803 /* This is arithmetic shift. */
10804 offset_12 = s_word >> shift_imm;
10805 }
10806 break;
10807
10808 case 3:
10809 if (!shift_imm)
10810 {
10811 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10812 &u_regval[1]);
10813 /* Get C flag value and shift it by 31. */
10814 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10815 | (u_regval[0]) >> 1);
10816 }
10817 else
10818 {
10819 offset_12 = (u_regval[0] >> shift_imm) \
10820 | (u_regval[0] <<
10821 (sizeof(uint32_t) - shift_imm));
10822 }
10823 break;
10824
10825 default:
10826 gdb_assert_not_reached ("no decoding pattern found");
10827 break;
10828 }
10829
10830 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10831 /* bit U set. */
10832 if (bit (arm_insn_r->arm_insn, 23))
10833 {
10834 tgt_mem_addr = u_regval[1] + offset_12;
10835 }
10836 else
10837 {
10838 tgt_mem_addr = u_regval[1] - offset_12;
10839 }
10840
10841 switch (arm_insn_r->opcode)
10842 {
10843 /* STR. */
10844 case 8:
10845 case 12:
10846 /* STR. */
10847 case 9:
10848 case 13:
10849 /* STRT. */
10850 case 1:
10851 case 5:
10852 /* STR. */
10853 case 0:
10854 case 4:
10855 record_buf_mem[0] = 4;
10856 break;
10857
10858 /* STRB. */
10859 case 10:
10860 case 14:
10861 /* STRB. */
10862 case 11:
10863 case 15:
10864 /* STRBT. */
10865 case 3:
10866 case 7:
10867 /* STRB. */
10868 case 2:
10869 case 6:
10870 record_buf_mem[0] = 1;
10871 break;
10872
10873 default:
10874 gdb_assert_not_reached ("no decoding pattern found");
10875 break;
10876 }
10877 record_buf_mem[1] = tgt_mem_addr;
10878 arm_insn_r->mem_rec_count = 1;
10879
10880 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10881 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10882 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10883 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10884 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10885 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10886 )
10887 {
10888 /* Rn is going to be changed in register scaled pre-indexed
10889 mode,and scaled post indexed mode. */
10890 record_buf[0] = reg_src2;
10891 arm_insn_r->reg_rec_count = 1;
10892 }
10893 }
10894 }
10895
10896 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10897 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10898 return 0;
10899 }
10900
10901 /* Handle ARM mode instructions with opcode 100. */
10902
10903 static int
10904 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10905 {
10906 struct regcache *reg_cache = arm_insn_r->regcache;
10907 uint32_t register_count = 0, register_bits;
10908 uint32_t reg_base, addr_mode;
10909 uint32_t record_buf[24], record_buf_mem[48];
10910 uint32_t wback;
10911 ULONGEST u_regval;
10912
10913 /* Fetch the list of registers. */
10914 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10915 arm_insn_r->reg_rec_count = 0;
10916
10917 /* Fetch the base register that contains the address we are loading data
10918 to. */
10919 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10920
10921 /* Calculate wback. */
10922 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10923
10924 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10925 {
10926 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10927
10928 /* Find out which registers are going to be loaded from memory. */
10929 while (register_bits)
10930 {
10931 if (register_bits & 0x00000001)
10932 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10933 register_bits = register_bits >> 1;
10934 register_count++;
10935 }
10936
10937
10938 /* If wback is true, also save the base register, which is going to be
10939 written to. */
10940 if (wback)
10941 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10942
10943 /* Save the CPSR register. */
10944 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10945 }
10946 else
10947 {
10948 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10949
10950 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10951
10952 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10953
10954 /* Find out how many registers are going to be stored to memory. */
10955 while (register_bits)
10956 {
10957 if (register_bits & 0x00000001)
10958 register_count++;
10959 register_bits = register_bits >> 1;
10960 }
10961
10962 switch (addr_mode)
10963 {
10964 /* STMDA (STMED): Decrement after. */
10965 case 0:
10966 record_buf_mem[1] = (uint32_t) u_regval
10967 - register_count * INT_REGISTER_SIZE + 4;
10968 break;
10969 /* STM (STMIA, STMEA): Increment after. */
10970 case 1:
10971 record_buf_mem[1] = (uint32_t) u_regval;
10972 break;
10973 /* STMDB (STMFD): Decrement before. */
10974 case 2:
10975 record_buf_mem[1] = (uint32_t) u_regval
10976 - register_count * INT_REGISTER_SIZE;
10977 break;
10978 /* STMIB (STMFA): Increment before. */
10979 case 3:
10980 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10981 break;
10982 default:
10983 gdb_assert_not_reached ("no decoding pattern found");
10984 break;
10985 }
10986
10987 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
10988 arm_insn_r->mem_rec_count = 1;
10989
10990 /* If wback is true, also save the base register, which is going to be
10991 written to. */
10992 if (wback)
10993 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10994 }
10995
10996 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10997 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10998 return 0;
10999 }
11000
11001 /* Handling opcode 101 insns. */
11002
11003 static int
11004 arm_record_b_bl (insn_decode_record *arm_insn_r)
11005 {
11006 uint32_t record_buf[8];
11007
11008 /* Handle B, BL, BLX(1) insns. */
11009 /* B simply branches so we do nothing here. */
11010 /* Note: BLX(1) doesnt fall here but instead it falls into
11011 extension space. */
11012 if (bit (arm_insn_r->arm_insn, 24))
11013 {
11014 record_buf[0] = ARM_LR_REGNUM;
11015 arm_insn_r->reg_rec_count = 1;
11016 }
11017
11018 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11019
11020 return 0;
11021 }
11022
11023 static int
11024 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11025 {
11026 printf_unfiltered (_("Process record does not support instruction "
11027 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11028 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11029
11030 return -1;
11031 }
11032
11033 /* Record handler for vector data transfer instructions. */
11034
11035 static int
11036 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11037 {
11038 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11039 uint32_t record_buf[4];
11040
11041 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11042 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11043 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11044 bit_l = bit (arm_insn_r->arm_insn, 20);
11045 bit_c = bit (arm_insn_r->arm_insn, 8);
11046
11047 /* Handle VMOV instruction. */
11048 if (bit_l && bit_c)
11049 {
11050 record_buf[0] = reg_t;
11051 arm_insn_r->reg_rec_count = 1;
11052 }
11053 else if (bit_l && !bit_c)
11054 {
11055 /* Handle VMOV instruction. */
11056 if (bits_a == 0x00)
11057 {
11058 record_buf[0] = reg_t;
11059 arm_insn_r->reg_rec_count = 1;
11060 }
11061 /* Handle VMRS instruction. */
11062 else if (bits_a == 0x07)
11063 {
11064 if (reg_t == 15)
11065 reg_t = ARM_PS_REGNUM;
11066
11067 record_buf[0] = reg_t;
11068 arm_insn_r->reg_rec_count = 1;
11069 }
11070 }
11071 else if (!bit_l && !bit_c)
11072 {
11073 /* Handle VMOV instruction. */
11074 if (bits_a == 0x00)
11075 {
11076 record_buf[0] = ARM_D0_REGNUM + reg_v;
11077
11078 arm_insn_r->reg_rec_count = 1;
11079 }
11080 /* Handle VMSR instruction. */
11081 else if (bits_a == 0x07)
11082 {
11083 record_buf[0] = ARM_FPSCR_REGNUM;
11084 arm_insn_r->reg_rec_count = 1;
11085 }
11086 }
11087 else if (!bit_l && bit_c)
11088 {
11089 /* Handle VMOV instruction. */
11090 if (!(bits_a & 0x04))
11091 {
11092 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11093 + ARM_D0_REGNUM;
11094 arm_insn_r->reg_rec_count = 1;
11095 }
11096 /* Handle VDUP instruction. */
11097 else
11098 {
11099 if (bit (arm_insn_r->arm_insn, 21))
11100 {
11101 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11102 record_buf[0] = reg_v + ARM_D0_REGNUM;
11103 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11104 arm_insn_r->reg_rec_count = 2;
11105 }
11106 else
11107 {
11108 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11109 record_buf[0] = reg_v + ARM_D0_REGNUM;
11110 arm_insn_r->reg_rec_count = 1;
11111 }
11112 }
11113 }
11114
11115 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11116 return 0;
11117 }
11118
/* Record handler for extension register load/store instructions:
   VMOV between core and extension registers, VLDM/VSTM (including
   VPUSH), and VLDR/VSTR.  Registers the instruction will write are
   collected in RECORD_BUF; memory ranges it will write (as size,
   address pairs) in RECORD_BUF_MEM.  Both are then queued for the
   record/replay machinery via REG_ALLOC / MEM_ALLOC.  Returns 0.  */

static int
arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
{
  uint32_t opcode, single_reg;
  uint8_t op_vldm_vstm;
  uint32_t record_buf[8], record_buf_mem[128];
  ULONGEST u_regval = 0;

  struct regcache *reg_cache = arm_insn_r->regcache;

  /* Bits 20..24 hold the P/U/D/W/L fields for this instruction class.  */
  opcode = bits (arm_insn_r->arm_insn, 20, 24);
  /* Bit 8 clear selects single-precision (S) registers.  */
  single_reg = !bit (arm_insn_r->arm_insn, 8);
  /* Mask off the D bit (bit 2 of OPCODE) to group VLDM/VSTM variants.  */
  op_vldm_vstm = opcode & 0x1b;

  /* Handle VMOV instructions.  */
  if ((opcode & 0x1e) == 0x04)
    {
      if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
	{
	  /* Transfer to ARM core registers: Rt (12..15) and Rt2 (16..19)
	     are overwritten.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
	  arm_insn_r->reg_rec_count = 2;
	}
      else
	{
	  /* Transfer from core registers into extension registers.  */
	  uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
	  uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);

	  if (single_reg)
	    {
	      /* The first S register number m is REG_M:M (M is bit 5),
		 the corresponding D register number is REG_M:M / 2, which
		 is REG_M.  */
	      record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
	      /* The second S register number is REG_M:M + 1, the
		 corresponding D register number is (REG_M:M + 1) / 2.
		 IOW, if bit M is 1, the first and second S registers
		 are mapped to different D registers, otherwise, they are
		 in the same D register.  */
	      if (bit_m)
		{
		  record_buf[arm_insn_r->reg_rec_count++]
		    = ARM_D0_REGNUM + reg_m + 1;
		}
	    }
	  else
	    {
	      /* Destination is the double register D<bit_m:reg_m>.  */
	      record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
	      arm_insn_r->reg_rec_count = 1;
	    }
	}
    }
  /* Handle VSTM and VPUSH instructions.  */
  else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
	   || op_vldm_vstm == 0x12)
    {
      uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
      uint32_t memory_index = 0;

      reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
      /* IMM_OFF8 is the register-list length in words (4 bytes each).  */
      imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
      imm_off32 = imm_off8 << 2;
      /* NOTE(review): MEMORY_COUNT is IMM_OFF8 even in the
	 double-precision case, yet the loop below records 8 bytes per
	 iteration there — this looks like it records twice the region
	 actually stored, and for large lists can also overrun
	 RECORD_BUF_MEM.  Confirm against the ARM ARM VSTM encoding.  */
      memory_count = imm_off8;

      /* U bit (23): set means addresses ascend from Rn, clear means the
	 block sits IMM_OFF32 bytes below Rn.  */
      if (bit (arm_insn_r->arm_insn, 23))
	start_address = u_regval;
      else
	start_address = u_regval - imm_off32;

      /* W bit (21): base register write-back, so Rn itself changes.  */
      if (bit (arm_insn_r->arm_insn, 21))
	{
	  record_buf[0] = reg_rn;
	  arm_insn_r->reg_rec_count = 1;
	}

      while (memory_count > 0)
	{
	  if (single_reg)
	    {
	      /* One 4-byte slot per S register.  */
	      record_buf_mem[memory_index] = 4;
	      record_buf_mem[memory_index + 1] = start_address;
	      start_address = start_address + 4;
	      memory_index = memory_index + 2;
	    }
	  else
	    {
	      /* Two 4-byte slots per D register.  */
	      record_buf_mem[memory_index] = 4;
	      record_buf_mem[memory_index + 1] = start_address;
	      record_buf_mem[memory_index + 2] = 4;
	      record_buf_mem[memory_index + 3] = start_address + 4;
	      start_address = start_address + 8;
	      memory_index = memory_index + 4;
	    }
	  memory_count--;
	}
      /* Each recorded range used two slots (size, address).  */
      arm_insn_r->mem_rec_count = (memory_index >> 1);
    }
  /* Handle VLDM instructions.  */
  else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
	   || op_vldm_vstm == 0x13)
    {
      uint32_t reg_count, reg_vd;
      uint32_t reg_index = 0;
      uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);

      reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
      reg_count = bits (arm_insn_r->arm_insn, 0, 7);

      /* REG_VD is the first D register number.  If the instruction
	 loads memory to S registers (SINGLE_REG is TRUE), the register
	 number is (REG_VD << 1 | bit D), so the corresponding D
	 register number is (REG_VD << 1 | bit D) / 2 = REG_VD.  */
      if (!single_reg)
	reg_vd = reg_vd | (bit_d << 4);

      if (bit (arm_insn_r->arm_insn, 21) /* write back */)
	record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);

      /* If the instruction loads memory to D register, REG_COUNT should
	 be divided by 2, according to the ARM Architecture Reference
	 Manual.  If the instruction loads memory to S register, divide by
	 2 as well because two S registers are mapped to D register.  */
      reg_count = reg_count / 2;
      if (single_reg && bit_d)
	{
	  /* Increase the register count if S register list starts from
	     an odd number (bit d is one).  */
	  reg_count++;
	}

      /* Record the clobbered D registers, highest-numbered first.  */
      while (reg_count > 0)
	{
	  record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
	  reg_count--;
	}
      arm_insn_r->reg_rec_count = reg_index;
    }
  /* VSTR Vector store register.  */
  else if ((opcode & 0x13) == 0x10)
    {
      uint32_t start_address, reg_rn, imm_off32, imm_off8;
      uint32_t memory_index = 0;

      reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
      imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
      imm_off32 = imm_off8 << 2;

      /* U bit selects whether the immediate is added or subtracted.  */
      if (bit (arm_insn_r->arm_insn, 23))
	start_address = u_regval + imm_off32;
      else
	start_address = u_regval - imm_off32;

      if (single_reg)
	{
	  /* One word stored.  */
	  record_buf_mem[memory_index] = 4;
	  record_buf_mem[memory_index + 1] = start_address;
	  arm_insn_r->mem_rec_count = 1;
	}
      else
	{
	  /* Two words stored (one D register).  */
	  record_buf_mem[memory_index] = 4;
	  record_buf_mem[memory_index + 1] = start_address;
	  record_buf_mem[memory_index + 2] = 4;
	  record_buf_mem[memory_index + 3] = start_address + 4;
	  arm_insn_r->mem_rec_count = 2;
	}
    }
  /* VLDR Vector load register.  */
  else if ((opcode & 0x13) == 0x11)
    {
      uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);

      if (!single_reg)
	{
	  reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
	  record_buf[0] = ARM_D0_REGNUM + reg_vd;
	}
      else
	{
	  reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
	  /* Record register D rather than pseudo register S.  */
	  record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
	}
      arm_insn_r->reg_rec_count = 1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
11313
11314 /* Record handler for arm/thumb mode VFP data processing instructions. */
11315
11316 static int
11317 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11318 {
11319 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11320 uint32_t record_buf[4];
11321 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11322 enum insn_types curr_insn_type = INSN_INV;
11323
11324 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11325 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11326 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11327 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11328 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11329 bit_d = bit (arm_insn_r->arm_insn, 22);
11330 opc1 = opc1 & 0x04;
11331
11332 /* Handle VMLA, VMLS. */
11333 if (opc1 == 0x00)
11334 {
11335 if (bit (arm_insn_r->arm_insn, 10))
11336 {
11337 if (bit (arm_insn_r->arm_insn, 6))
11338 curr_insn_type = INSN_T0;
11339 else
11340 curr_insn_type = INSN_T1;
11341 }
11342 else
11343 {
11344 if (dp_op_sz)
11345 curr_insn_type = INSN_T1;
11346 else
11347 curr_insn_type = INSN_T2;
11348 }
11349 }
11350 /* Handle VNMLA, VNMLS, VNMUL. */
11351 else if (opc1 == 0x01)
11352 {
11353 if (dp_op_sz)
11354 curr_insn_type = INSN_T1;
11355 else
11356 curr_insn_type = INSN_T2;
11357 }
11358 /* Handle VMUL. */
11359 else if (opc1 == 0x02 && !(opc3 & 0x01))
11360 {
11361 if (bit (arm_insn_r->arm_insn, 10))
11362 {
11363 if (bit (arm_insn_r->arm_insn, 6))
11364 curr_insn_type = INSN_T0;
11365 else
11366 curr_insn_type = INSN_T1;
11367 }
11368 else
11369 {
11370 if (dp_op_sz)
11371 curr_insn_type = INSN_T1;
11372 else
11373 curr_insn_type = INSN_T2;
11374 }
11375 }
11376 /* Handle VADD, VSUB. */
11377 else if (opc1 == 0x03)
11378 {
11379 if (!bit (arm_insn_r->arm_insn, 9))
11380 {
11381 if (bit (arm_insn_r->arm_insn, 6))
11382 curr_insn_type = INSN_T0;
11383 else
11384 curr_insn_type = INSN_T1;
11385 }
11386 else
11387 {
11388 if (dp_op_sz)
11389 curr_insn_type = INSN_T1;
11390 else
11391 curr_insn_type = INSN_T2;
11392 }
11393 }
11394 /* Handle VDIV. */
11395 else if (opc1 == 0x0b)
11396 {
11397 if (dp_op_sz)
11398 curr_insn_type = INSN_T1;
11399 else
11400 curr_insn_type = INSN_T2;
11401 }
11402 /* Handle all other vfp data processing instructions. */
11403 else if (opc1 == 0x0b)
11404 {
11405 /* Handle VMOV. */
11406 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11407 {
11408 if (bit (arm_insn_r->arm_insn, 4))
11409 {
11410 if (bit (arm_insn_r->arm_insn, 6))
11411 curr_insn_type = INSN_T0;
11412 else
11413 curr_insn_type = INSN_T1;
11414 }
11415 else
11416 {
11417 if (dp_op_sz)
11418 curr_insn_type = INSN_T1;
11419 else
11420 curr_insn_type = INSN_T2;
11421 }
11422 }
11423 /* Handle VNEG and VABS. */
11424 else if ((opc2 == 0x01 && opc3 == 0x01)
11425 || (opc2 == 0x00 && opc3 == 0x03))
11426 {
11427 if (!bit (arm_insn_r->arm_insn, 11))
11428 {
11429 if (bit (arm_insn_r->arm_insn, 6))
11430 curr_insn_type = INSN_T0;
11431 else
11432 curr_insn_type = INSN_T1;
11433 }
11434 else
11435 {
11436 if (dp_op_sz)
11437 curr_insn_type = INSN_T1;
11438 else
11439 curr_insn_type = INSN_T2;
11440 }
11441 }
11442 /* Handle VSQRT. */
11443 else if (opc2 == 0x01 && opc3 == 0x03)
11444 {
11445 if (dp_op_sz)
11446 curr_insn_type = INSN_T1;
11447 else
11448 curr_insn_type = INSN_T2;
11449 }
11450 /* Handle VCVT. */
11451 else if (opc2 == 0x07 && opc3 == 0x03)
11452 {
11453 if (!dp_op_sz)
11454 curr_insn_type = INSN_T1;
11455 else
11456 curr_insn_type = INSN_T2;
11457 }
11458 else if (opc3 & 0x01)
11459 {
11460 /* Handle VCVT. */
11461 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11462 {
11463 if (!bit (arm_insn_r->arm_insn, 18))
11464 curr_insn_type = INSN_T2;
11465 else
11466 {
11467 if (dp_op_sz)
11468 curr_insn_type = INSN_T1;
11469 else
11470 curr_insn_type = INSN_T2;
11471 }
11472 }
11473 /* Handle VCVT. */
11474 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11475 {
11476 if (dp_op_sz)
11477 curr_insn_type = INSN_T1;
11478 else
11479 curr_insn_type = INSN_T2;
11480 }
11481 /* Handle VCVTB, VCVTT. */
11482 else if ((opc2 & 0x0e) == 0x02)
11483 curr_insn_type = INSN_T2;
11484 /* Handle VCMP, VCMPE. */
11485 else if ((opc2 & 0x0e) == 0x04)
11486 curr_insn_type = INSN_T3;
11487 }
11488 }
11489
11490 switch (curr_insn_type)
11491 {
11492 case INSN_T0:
11493 reg_vd = reg_vd | (bit_d << 4);
11494 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11495 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11496 arm_insn_r->reg_rec_count = 2;
11497 break;
11498
11499 case INSN_T1:
11500 reg_vd = reg_vd | (bit_d << 4);
11501 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11502 arm_insn_r->reg_rec_count = 1;
11503 break;
11504
11505 case INSN_T2:
11506 reg_vd = (reg_vd << 1) | bit_d;
11507 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11508 arm_insn_r->reg_rec_count = 1;
11509 break;
11510
11511 case INSN_T3:
11512 record_buf[0] = ARM_FPSCR_REGNUM;
11513 arm_insn_r->reg_rec_count = 1;
11514 break;
11515
11516 default:
11517 gdb_assert_not_reached ("no decoding pattern found");
11518 break;
11519 }
11520
11521 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11522 return 0;
11523 }
11524
11525 /* Handling opcode 110 insns. */
11526
11527 static int
11528 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11529 {
11530 uint32_t op1, op1_ebit, coproc;
11531
11532 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11533 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11534 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11535
11536 if ((coproc & 0x0e) == 0x0a)
11537 {
11538 /* Handle extension register ld/st instructions. */
11539 if (!(op1 & 0x20))
11540 return arm_record_exreg_ld_st_insn (arm_insn_r);
11541
11542 /* 64-bit transfers between arm core and extension registers. */
11543 if ((op1 & 0x3e) == 0x04)
11544 return arm_record_exreg_ld_st_insn (arm_insn_r);
11545 }
11546 else
11547 {
11548 /* Handle coprocessor ld/st instructions. */
11549 if (!(op1 & 0x3a))
11550 {
11551 /* Store. */
11552 if (!op1_ebit)
11553 return arm_record_unsupported_insn (arm_insn_r);
11554 else
11555 /* Load. */
11556 return arm_record_unsupported_insn (arm_insn_r);
11557 }
11558
11559 /* Move to coprocessor from two arm core registers. */
11560 if (op1 == 0x4)
11561 return arm_record_unsupported_insn (arm_insn_r);
11562
11563 /* Move to two arm core registers from coprocessor. */
11564 if (op1 == 0x5)
11565 {
11566 uint32_t reg_t[2];
11567
11568 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11569 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11570 arm_insn_r->reg_rec_count = 2;
11571
11572 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11573 return 0;
11574 }
11575 }
11576 return arm_record_unsupported_insn (arm_insn_r);
11577 }
11578
/* Handling opcode 111 insns.

   Covers SWI/SVC system calls (delegated to the OS-ABI's
   arm_syscall_record hook), VFP data-processing and transfer
   instructions (delegated to their handlers), and coprocessor
   register moves.  Returns 0 on success, -1 on failure, or the
   delegated handler's result.  */

static int
arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
{
  uint32_t op, op1_sbit, op1_ebit, coproc;
  struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
  struct regcache *reg_cache = arm_insn_r->regcache;

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
  coproc = bits (arm_insn_r->arm_insn, 8, 11);
  op1_sbit = bit (arm_insn_r->arm_insn, 24);
  op1_ebit = bit (arm_insn_r->arm_insn, 20);
  op = bit (arm_insn_r->arm_insn, 4);

  /* Handle arm SWI/SVC system call instructions.  */
  if (op1_sbit)
    {
      if (tdep->arm_syscall_record != NULL)
	{
	  ULONGEST svc_operand, svc_number;

	  svc_operand = (0x00ffffff & arm_insn_r->arm_insn);

	  if (svc_operand)  /* OABI: syscall number is the SVC immediate
			       biased by 0x900000.  */
	    svc_number = svc_operand - 0x900000;
	  else /* EABI: immediate is zero; syscall number is in r7.  */
	    regcache_raw_read_unsigned (reg_cache, 7, &svc_number);

	  return tdep->arm_syscall_record (reg_cache, svc_number);
	}
      else
	{
	  printf_unfiltered (_("no syscall record support\n"));
	  return -1;
	}
    }

  if ((coproc & 0x0e) == 0x0a)
    {
      /* VFP data-processing instructions.  */
      if (!op1_sbit && !op)
	return arm_record_vfp_data_proc_insn (arm_insn_r);

      /* Advanced SIMD, VFP instructions.  */
      if (!op1_sbit && op)
	return arm_record_vdata_transfer_insn (arm_insn_r);
    }
  else
    {
      /* Coprocessor data operations: no ARM state recorded.  */
      if (!op1_sbit && !op)
	return arm_record_unsupported_insn (arm_insn_r);

      /* Move to Coprocessor from ARM core register.  */
      if (!op1_sbit && !op1_ebit && op)
	return arm_record_unsupported_insn (arm_insn_r);

      /* Move to arm core register from coprocessor (MRC).  */
      if (!op1_sbit && op1_ebit && op)
	{
	  uint32_t record_buf[1];

	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  /* Rt == 15 means the flags (APSR) are the destination, not
	     the PC.  */
	  if (record_buf[0] == 15)
	    record_buf[0] = ARM_PS_REGNUM;

	  arm_insn_r->reg_rec_count = 1;
	  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
		     record_buf);
	  return 0;
	}
    }

  return arm_record_unsupported_insn (arm_insn_r);
}
11655
11656 /* Handling opcode 000 insns. */
11657
11658 static int
11659 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11660 {
11661 uint32_t record_buf[8];
11662 uint32_t reg_src1 = 0;
11663
11664 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11665
11666 record_buf[0] = ARM_PS_REGNUM;
11667 record_buf[1] = reg_src1;
11668 thumb_insn_r->reg_rec_count = 2;
11669
11670 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11671
11672 return 0;
11673 }
11674
11675
11676 /* Handling opcode 001 insns. */
11677
11678 static int
11679 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11680 {
11681 uint32_t record_buf[8];
11682 uint32_t reg_src1 = 0;
11683
11684 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11685
11686 record_buf[0] = ARM_PS_REGNUM;
11687 record_buf[1] = reg_src1;
11688 thumb_insn_r->reg_rec_count = 2;
11689
11690 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11691
11692 return 0;
11693 }
11694
/* Handling opcode 010 insns.

   Covers Thumb load/store with register offset, load from literal
   pool, special data processing / branch-exchange, and data-processing
   register forms.  Registers to be clobbered go in RECORD_BUF, memory
   (size, address) pairs in RECORD_BUF_MEM.  Returns 0.  */

static int
thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
{
  struct regcache *reg_cache = thumb_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];

  uint32_t reg_src1 = 0, reg_src2 = 0;
  uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;

  ULONGEST u_regval[2] = {0};

  opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);

  if (bit (thumb_insn_r->arm_insn, 12))
    {
      /* Handle load/store register offset.  */
      uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);

      if (in_inclusive_range (opB, 4U, 7U))
	{
	  /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH: destination
	     register Rd changes.  */
	  reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
	  record_buf[0] = reg_src1;
	  thumb_insn_r->reg_rec_count = 1;
	}
      else if (in_inclusive_range (opB, 0U, 2U))
	{
	  /* STR(2), STRB(2), STRH(2): memory at [Rn + Rm] changes;
	     the store size depends on OPB.  */
	  reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
	  reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
	  regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
	  regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
	  if (0 == opB)
	    record_buf_mem[0] = 4;    /* STR (2).  */
	  else if (2 == opB)
	    record_buf_mem[0] = 1;    /*  STRB (2).  */
	  else if (1 == opB)
	    record_buf_mem[0] = 2;    /* STRH (2).  */
	  record_buf_mem[1] = u_regval[0] + u_regval[1];
	  thumb_insn_r->mem_rec_count = 1;
	}
    }
  else if (bit (thumb_insn_r->arm_insn, 11))
    {
      /* Handle load from literal pool.  */
      /* LDR(3): destination register in bits 8..10.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      record_buf[0] = reg_src1;
      thumb_insn_r->reg_rec_count = 1;
    }
  else if (opcode1)
    {
      /* Special data instructions and branch and exchange.  */
      opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
      opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
      if ((3 == opcode2) && (!opcode3))
	{
	  /* Branch with exchange: only the status register needs
	     saving (PC is handled by the caller).  */
	  record_buf[0] = ARM_PS_REGNUM;
	  thumb_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* Format 8; special data processing insns.  The destination
	     register number is H1 (bit 7) : bits 0..2, allowing access
	     to the high registers r8-r15.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
			   | bits (thumb_insn_r->arm_insn, 0, 2));
	  thumb_insn_r->reg_rec_count = 2;
	}
    }
  else
    {
      /* Format 5; data processing insns.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
      if (bit (thumb_insn_r->arm_insn, 7))
	{
	  /* H1 set selects the high register bank (r8-r15).  */
	  reg_src1 = reg_src1 + 8;
	}
      record_buf[0] = ARM_PS_REGNUM;
      record_buf[1] = reg_src1;
      thumb_insn_r->reg_rec_count = 2;
    }

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return 0;
}
11786
11787 /* Handling opcode 001 insns. */
11788
11789 static int
11790 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11791 {
11792 struct regcache *reg_cache = thumb_insn_r->regcache;
11793 uint32_t record_buf[8], record_buf_mem[8];
11794
11795 uint32_t reg_src1 = 0;
11796 uint32_t opcode = 0, immed_5 = 0;
11797
11798 ULONGEST u_regval = 0;
11799
11800 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11801
11802 if (opcode)
11803 {
11804 /* LDR(1). */
11805 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11806 record_buf[0] = reg_src1;
11807 thumb_insn_r->reg_rec_count = 1;
11808 }
11809 else
11810 {
11811 /* STR(1). */
11812 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11813 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11814 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11815 record_buf_mem[0] = 4;
11816 record_buf_mem[1] = u_regval + (immed_5 * 4);
11817 thumb_insn_r->mem_rec_count = 1;
11818 }
11819
11820 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11821 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11822 record_buf_mem);
11823
11824 return 0;
11825 }
11826
11827 /* Handling opcode 100 insns. */
11828
11829 static int
11830 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11831 {
11832 struct regcache *reg_cache = thumb_insn_r->regcache;
11833 uint32_t record_buf[8], record_buf_mem[8];
11834
11835 uint32_t reg_src1 = 0;
11836 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11837
11838 ULONGEST u_regval = 0;
11839
11840 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11841
11842 if (3 == opcode)
11843 {
11844 /* LDR(4). */
11845 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11846 record_buf[0] = reg_src1;
11847 thumb_insn_r->reg_rec_count = 1;
11848 }
11849 else if (1 == opcode)
11850 {
11851 /* LDRH(1). */
11852 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11853 record_buf[0] = reg_src1;
11854 thumb_insn_r->reg_rec_count = 1;
11855 }
11856 else if (2 == opcode)
11857 {
11858 /* STR(3). */
11859 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11860 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11861 record_buf_mem[0] = 4;
11862 record_buf_mem[1] = u_regval + (immed_8 * 4);
11863 thumb_insn_r->mem_rec_count = 1;
11864 }
11865 else if (0 == opcode)
11866 {
11867 /* STRH(1). */
11868 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11869 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11870 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11871 record_buf_mem[0] = 2;
11872 record_buf_mem[1] = u_regval + (immed_5 * 2);
11873 thumb_insn_r->mem_rec_count = 1;
11874 }
11875
11876 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11877 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11878 record_buf_mem);
11879
11880 return 0;
11881 }
11882
/* Handling opcode 101 insns.

   Covers ADR / ADD (SP plus immediate) and the miscellaneous 16-bit
   Thumb instructions (SP adjust, sign/zero extend, PUSH/POP, REV*,
   BKPT, IT/hints).  Returns 0 on success, -1 for unsupported
   encodings (notably BKPT, whose SPSR save is not implemented).  */

static int
thumb_record_misc (insn_decode_record *thumb_insn_r)
{
  struct regcache *reg_cache = thumb_insn_r->regcache;

  uint32_t opcode = 0;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];
  uint32_t reg_src1;

  ULONGEST u_regval = 0;

  opcode = bits (thumb_insn_r->arm_insn, 11, 12);

  if (opcode == 0 || opcode == 1)
    {
      /* ADR and ADD (SP plus immediate): destination register is
	 bits 8..10.  */

      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      record_buf[0] = reg_src1;
      thumb_insn_r->reg_rec_count = 1;
    }
  else
    {
      /* Miscellaneous 16-bit instructions.  */
      uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);

      switch (opcode2)
	{
	case 6:
	  /* SETEND and CPS: no recordable ARM state here.  */
	  break;
	case 0:
	  /* ADD/SUB (SP plus immediate): only SP changes.  */
	  reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
	  record_buf[0] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 1: /* fall through  */
	case 3: /* fall through  */
	case 9: /* fall through  */
	case 11:
	  /* CBNZ, CBZ: branch only; PC is saved by the caller.  */
	  break;
	case 2:
	  /* SXTH, SXTB, UXTH, UXTB: destination is bits 0..2.  */
	  record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 4: /* fall through  */
	case 5:
	  /* PUSH: count the registers in the list, then record the
	     stack words they will overwrite (the list, plus LR if
	     bit 8 is set) and the updated SP.  */
	  register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
	  regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;
	      register_bits = register_bits >> 1;
	    }
	  start_address = u_regval -  \
	    (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
	  thumb_insn_r->mem_rec_count = register_count;
	  while (register_count)
	    {
	      record_buf_mem[(register_count * 2) - 1] = start_address;
	      record_buf_mem[(register_count * 2) - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  record_buf[0] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 10:
	  /* REV, REV16, REVSH: destination is bits 0..2.  */
	  record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 12: /* fall through  */
	case 13:
	  /* POP: every register in the list is overwritten, plus the
	     flags and SP.  */
	  register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;
	      register_bits = register_bits >> 1;
	      register_count++;
	    }
	  record_buf[index++] = ARM_PS_REGNUM;
	  record_buf[index++] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = index;
	  break;
	case 0xe:
	  /* BKPT insn.  */
	  /* Handle enhanced software breakpoint insn, BKPT.  */
	  /* CPSR is changed to be executed in ARM state, disabling normal
	     interrupts, entering abort mode.  */
	  /* According to high vector configuration PC is set.  */
	  /* User hits breakpoint and type reverse, in that case, we need to go back with
	     previous CPSR and Program Counter.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  thumb_insn_r->reg_rec_count = 2;
	  /* We need to save SPSR value, which is not yet done.  */
	  printf_unfiltered (_("Process record does not support instruction "
			       "0x%0x at address %s.\n"),
			     thumb_insn_r->arm_insn,
			     paddress (thumb_insn_r->gdbarch,
				       thumb_insn_r->this_addr));
	  return -1;

	case 0xf:
	  /* If-Then, and hints: no recordable state.  */
	  break;
	default:
	  return -1;
	};
    }

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return 0;
}
12012
/* Handling opcode 110 insns.

   Covers Thumb LDMIA/STMIA and the SWI/SVC system call (delegated to
   the OS-ABI's arm_syscall_record hook, with the syscall number read
   from r7).  Returns 0 on success, -1 on record failure.  */

static int
thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
  struct regcache *reg_cache = thumb_insn_r->regcache;

  uint32_t ret = 0; /* function return value: -1:record failure ;  0:success  */
  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
  opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);

  if (1 == opcode2)
    {

      /* LDMIA: every register in the list is overwritten, and so is
	 the base register Rn (write-back).  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    record_buf[index++] = register_count;
	  register_bits = register_bits >> 1;
	  register_count++;
	}
      record_buf[index++] = reg_src1;
      thumb_insn_r->reg_rec_count = index;
    }
  else if (0 == opcode2)
    {
      /* It handles both STMIA.  Count the registers in the list and
	 record one word of memory per register, ascending from Rn.  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    register_count++;
	  register_bits = register_bits >> 1;
	}
      start_address = u_regval;
      thumb_insn_r->mem_rec_count = register_count;
      while (register_count)
	{
	  record_buf_mem[(register_count * 2) - 1] = start_address;
	  record_buf_mem[(register_count * 2) - 2] = 4;
	  start_address = start_address + 4;
	  register_count--;
	}
    }
  else if (0x1F == opcode1)
    {
      /* Handle arm syscall insn.  The EABI syscall number lives in r7.  */
      if (tdep->arm_syscall_record != NULL)
	{
	  regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
	  ret = tdep->arm_syscall_record (reg_cache, u_regval);
	}
      else
	{
	  printf_unfiltered (_("no syscall record support\n"));
	  return -1;
	}
    }

  /* B (1), conditional branch is automatically taken care in process_record,
     as PC is saved there.  */

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return ret;
}
12096
12097 /* Handling opcode 111 insns. */
12098
12099 static int
12100 thumb_record_branch (insn_decode_record *thumb_insn_r)
12101 {
12102 uint32_t record_buf[8];
12103 uint32_t bits_h = 0;
12104
12105 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12106
12107 if (2 == bits_h || 3 == bits_h)
12108 {
12109 /* BL */
12110 record_buf[0] = ARM_LR_REGNUM;
12111 thumb_insn_r->reg_rec_count = 1;
12112 }
12113 else if (1 == bits_h)
12114 {
12115 /* BLX(1). */
12116 record_buf[0] = ARM_PS_REGNUM;
12117 record_buf[1] = ARM_LR_REGNUM;
12118 thumb_insn_r->reg_rec_count = 2;
12119 }
12120
12121 /* B(2) is automatically taken care in process_record, as PC is
12122 saved there. */
12123
12124 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12125
12126 return 0;
12127 }
12128
/* Handler for thumb2 load/store multiple instructions.

   Covers RFE/SRS and the LDM/STM family.  For loads, the register
   list plus Rn and the flags are recorded; for stores, the memory
   region the register list will overwrite plus Rn and the flags.
   Returns ARM_RECORD_SUCCESS, or delegates SRS to the unsupported
   path.  */

static int
thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, op;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  op = bits (thumb2_insn_r->arm_insn, 23, 24);

  if (0 == op || 3 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle RFE instruction: restores CPSR from memory.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* Handle SRS instruction after reading banked SP.  */
	  return arm_record_unsupported_insn (thumb2_insn_r);
	}
    }
  else if (1 == op || 2 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;

	      register_count++;
	      register_bits = register_bits >> 1;
	    }
	  record_buf[index++] = reg_rn;
	  record_buf[index++] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = index;
	}
      else
	{
	  /* Handle STM/STMIA/STMEA and STMDB/STMFD.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;

	      register_bits = register_bits >> 1;
	    }

	  if (1 == op)
	    {
	      /* Start address for STM/STMIA/STMEA: increment-after,
		 stores begin at Rn.  */
	      start_address = u_regval;
	    }
	  else if (2 == op)
	    {
	      /* Start address for STMDB/STMFD: decrement-before,
		 stores begin REGISTER_COUNT words below Rn.  */
	      start_address = u_regval - register_count * 4;
	    }

	  thumb2_insn_r->mem_rec_count = register_count;
	  while (register_count)
	    {
	      record_buf_mem[register_count * 2 - 1] = start_address;
	      record_buf_mem[register_count * 2 - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  /* Rn (write-back) and the flags are also recorded.  */
	  record_buf[0] = reg_rn;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}
    }

  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  return ARM_RECORD_SUCCESS;
}
12222
/* Handler for thumb2 load/store (dual/exclusive) and table branch
   instructions.  Records the registers and memory ranges the insn will
   modify; returns ARM_RECORD_SUCCESS.  */

static int
thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rd, reg_rn, offset_imm;
  uint32_t reg_dest1, reg_dest2;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2, op3;

  ULONGEST u_regval[2];

  /* Sub-opcode fields of the (half-word swapped) instruction.  */
  op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
  op3 = bits (thumb2_insn_r->arm_insn, 4, 7);

  if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* Load forms: record the destination register(s) and flags.  The
	 excluded case (op1 == 1, op2 == 1, op3 == 0/1) is presumably
	 TBB/TBH, which writes no general register here -- TODO(review):
	 confirm against the ARM ARM.  */
      if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
        {
          reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
          record_buf[0] = reg_dest1;
          record_buf[1] = ARM_PS_REGNUM;
          thumb2_insn_r->reg_rec_count = 2;
        }

      /* Second destination register (bits 8-11) for the dual-load
	 forms.  */
      if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
        {
          reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
          record_buf[2] = reg_dest2;
          thumb2_insn_r->reg_rec_count = 3;
        }
    }
  else
    {
      /* Store forms: compute the affected memory from the base
	 register.  */
      reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

      if (0 == op1 && 0 == op2)
        {
          /* Handle STREX: one word at Rn + imm8*4 is written, plus the
	     result register taken from bits 0-3 here -- NOTE(review):
	     verify that field position against the encoding.  */
          offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
          address = u_regval[0] + (offset_imm * 4);
          record_buf_mem[0] = 4;
          record_buf_mem[1] = address;
          thumb2_insn_r->mem_rec_count = 1;
          reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
          record_buf[0] = reg_rd;
          thumb2_insn_r->reg_rec_count = 1;
        }
      else if (1 == op1 && 0 == op2)
        {
          /* Exclusive store variants; op3 selects the access width.  */
          reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
          record_buf[0] = reg_rd;
          thumb2_insn_r->reg_rec_count = 1;
          address = u_regval[0];
          record_buf_mem[1] = address;

          if (4 == op3)
            {
              /* Handle STREXB.  */
              record_buf_mem[0] = 1;
              thumb2_insn_r->mem_rec_count = 1;
            }
          else if (5 == op3)
            {
              /* Handle STREXH.  */
              record_buf_mem[0] = 2 ;
              thumb2_insn_r->mem_rec_count = 1;
            }
          else if (7 == op3)
            {
              /* Handle STREXD: two consecutive words are written.  */
              address = u_regval[0];
              record_buf_mem[0] = 4;
              record_buf_mem[2] = 4;
              record_buf_mem[3] = address + 4;
              thumb2_insn_r->mem_rec_count = 2;
            }
        }
      else
        {
          /* Dual store (STRD-style): bit 24 selects pre-indexing, bit 23
	     the offset sign.  */
          offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);

          if (bit (thumb2_insn_r->arm_insn, 24))
            {
              if (bit (thumb2_insn_r->arm_insn, 23))
                offset_addr = u_regval[0] + (offset_imm * 4);
              else
                offset_addr = u_regval[0] - (offset_imm * 4);

              address = offset_addr;
            }
          else
            address = u_regval[0];

          /* Two words written; the base register is also recorded since
	     writeback forms update it.  */
          record_buf_mem[0] = 4;
          record_buf_mem[1] = address;
          record_buf_mem[2] = 4;
          record_buf_mem[3] = address + 4;
          thumb2_insn_r->mem_rec_count = 2;
          record_buf[0] = reg_rn;
          thumb2_insn_r->reg_rec_count = 1;
        }
    }

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
             record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
             record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12339
12340 /* Handler for thumb2 data processing (shift register and modified immediate)
12341 instructions. */
12342
12343 static int
12344 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12345 {
12346 uint32_t reg_rd, op;
12347 uint32_t record_buf[8];
12348
12349 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12350 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12351
12352 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12353 {
12354 record_buf[0] = ARM_PS_REGNUM;
12355 thumb2_insn_r->reg_rec_count = 1;
12356 }
12357 else
12358 {
12359 record_buf[0] = reg_rd;
12360 record_buf[1] = ARM_PS_REGNUM;
12361 thumb2_insn_r->reg_rec_count = 2;
12362 }
12363
12364 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12365 record_buf);
12366 return ARM_RECORD_SUCCESS;
12367 }
12368
12369 /* Generic handler for thumb2 instructions which effect destination and PS
12370 registers. */
12371
12372 static int
12373 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12374 {
12375 uint32_t reg_rd;
12376 uint32_t record_buf[8];
12377
12378 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12379
12380 record_buf[0] = reg_rd;
12381 record_buf[1] = ARM_PS_REGNUM;
12382 thumb2_insn_r->reg_rec_count = 2;
12383
12384 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12385 record_buf);
12386 return ARM_RECORD_SUCCESS;
12387 }
12388
/* Handler for thumb2 branch and miscellaneous control instructions.
   Records the registers clobbered beyond the PC (which the caller
   arm_process_record always records itself).  */

static int
thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
{
  uint32_t op, op1, op2;
  uint32_t record_buf[8];

  /* Sub-opcode fields of the (half-word swapped) instruction.  */
  op = bits (thumb2_insn_r->arm_insn, 20, 26);
  op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
  op2 = bits (thumb2_insn_r->arm_insn, 8, 11);

  /* Handle MSR insn.  */
  if (!(op1 & 0x2) && 0x38 == op)
    {
      if (!(op2 & 0x3))
        {
          /* CPSR is going to be changed.  */
          record_buf[0] = ARM_PS_REGNUM;
          thumb2_insn_r->reg_rec_count = 1;
        }
      else
        {
          /* MSR variants targeting other special registers are not
	     supported by the recorder.  */
          arm_record_unsupported_insn(thumb2_insn_r);
          return -1;
        }
    }
  else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
    {
      /* BLX / BL (op1 with bit 2 set): the link register and the flags
	 may change; the PC is recorded by the caller.  */
      record_buf[0] = ARM_PS_REGNUM;
      record_buf[1] = ARM_LR_REGNUM;
      thumb2_insn_r->reg_rec_count = 2;
    }

  /* Plain branches fall through here with no extra registers.  */
  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
             record_buf);
  return ARM_RECORD_SUCCESS;
}
12428
/* Handler for thumb2 store single data item instructions.  Computes the
   effective address from the base register, records the stored byte /
   half-word / word range and the base register (for writeback forms).  */

static int
thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2;

  ULONGEST u_regval[2];

  op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
  op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

  if (bit (thumb2_insn_r->arm_insn, 23))
    {
      /* T2 encoding: 12-bit unsigned immediate offset, always added to
	 the base register.  */
      offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
      offset_addr = u_regval[0] + offset_imm;
      address = offset_addr;
    }
  else
    {
      /* T3 encoding.  */
      if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
        {
          /* Handle STRB (register): address = Rn + (Rm << shift).  */
          reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
          regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
          shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
          offset_addr = u_regval[1] << shift_imm;
          address = u_regval[0] + offset_addr;
        }
      else
        {
          /* 8-bit immediate form: bit 10 selects pre-indexing, bit 9 the
	     offset sign.  */
          offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
          if (bit (thumb2_insn_r->arm_insn, 10))
            {
              if (bit (thumb2_insn_r->arm_insn, 9))
                offset_addr = u_regval[0] + offset_imm;
              else
                offset_addr = u_regval[0] - offset_imm;

              address = offset_addr;
            }
          else
            address = u_regval[0];
        }
    }

  /* Access width is determined by op1.  */
  switch (op1)
    {
      /* Store byte instructions.  */
      case 4:
      case 0:
        record_buf_mem[0] = 1;
        break;
      /* Store half word instructions.  */
      case 1:
      case 5:
        record_buf_mem[0] = 2;
        break;
      /* Store word instructions.  */
      case 2:
      case 6:
        record_buf_mem[0] = 4;
        break;

      default:
        gdb_assert_not_reached ("no decoding pattern found");
        break;
    }

  record_buf_mem[1] = address;
  thumb2_insn_r->mem_rec_count = 1;
  /* Record the base register too, since writeback forms update it.  */
  record_buf[0] = reg_rn;
  thumb2_insn_r->reg_rec_count = 1;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
             record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
             record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12518
12519 /* Handler for thumb2 load memory hints instructions. */
12520
12521 static int
12522 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12523 {
12524 uint32_t record_buf[8];
12525 uint32_t reg_rt, reg_rn;
12526
12527 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12528 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12529
12530 if (ARM_PC_REGNUM != reg_rt)
12531 {
12532 record_buf[0] = reg_rt;
12533 record_buf[1] = reg_rn;
12534 record_buf[2] = ARM_PS_REGNUM;
12535 thumb2_insn_r->reg_rec_count = 3;
12536
12537 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12538 record_buf);
12539 return ARM_RECORD_SUCCESS;
12540 }
12541
12542 return ARM_RECORD_FAILURE;
12543 }
12544
12545 /* Handler for thumb2 load word instructions. */
12546
12547 static int
12548 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12549 {
12550 uint32_t record_buf[8];
12551
12552 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12553 record_buf[1] = ARM_PS_REGNUM;
12554 thumb2_insn_r->reg_rec_count = 2;
12555
12556 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12557 record_buf);
12558 return ARM_RECORD_SUCCESS;
12559 }
12560
12561 /* Handler for thumb2 long multiply, long multiply accumulate, and
12562 divide instructions. */
12563
12564 static int
12565 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12566 {
12567 uint32_t opcode1 = 0, opcode2 = 0;
12568 uint32_t record_buf[8];
12569
12570 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12571 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12572
12573 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12574 {
12575 /* Handle SMULL, UMULL, SMULAL. */
12576 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12577 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12578 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12579 record_buf[2] = ARM_PS_REGNUM;
12580 thumb2_insn_r->reg_rec_count = 3;
12581 }
12582 else if (1 == opcode1 || 3 == opcode2)
12583 {
12584 /* Handle SDIV and UDIV. */
12585 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12586 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12587 record_buf[2] = ARM_PS_REGNUM;
12588 thumb2_insn_r->reg_rec_count = 3;
12589 }
12590 else
12591 return ARM_RECORD_FAILURE;
12592
12593 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12594 record_buf);
12595 return ARM_RECORD_SUCCESS;
12596 }
12597
12598 /* Record handler for thumb32 coprocessor instructions. */
12599
12600 static int
12601 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12602 {
12603 if (bit (thumb2_insn_r->arm_insn, 25))
12604 return arm_record_coproc_data_proc (thumb2_insn_r);
12605 else
12606 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12607 }
12608
12609 /* Record handler for advance SIMD structure load/store instructions. */
12610
12611 static int
12612 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12613 {
12614 struct regcache *reg_cache = thumb2_insn_r->regcache;
12615 uint32_t l_bit, a_bit, b_bits;
12616 uint32_t record_buf[128], record_buf_mem[128];
12617 uint32_t reg_rn, reg_vd, address, f_elem;
12618 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12619 uint8_t f_ebytes;
12620
12621 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12622 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12623 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12624 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12625 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12626 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12627 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12628 f_elem = 8 / f_ebytes;
12629
12630 if (!l_bit)
12631 {
12632 ULONGEST u_regval = 0;
12633 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12634 address = u_regval;
12635
12636 if (!a_bit)
12637 {
12638 /* Handle VST1. */
12639 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12640 {
12641 if (b_bits == 0x07)
12642 bf_regs = 1;
12643 else if (b_bits == 0x0a)
12644 bf_regs = 2;
12645 else if (b_bits == 0x06)
12646 bf_regs = 3;
12647 else if (b_bits == 0x02)
12648 bf_regs = 4;
12649 else
12650 bf_regs = 0;
12651
12652 for (index_r = 0; index_r < bf_regs; index_r++)
12653 {
12654 for (index_e = 0; index_e < f_elem; index_e++)
12655 {
12656 record_buf_mem[index_m++] = f_ebytes;
12657 record_buf_mem[index_m++] = address;
12658 address = address + f_ebytes;
12659 thumb2_insn_r->mem_rec_count += 1;
12660 }
12661 }
12662 }
12663 /* Handle VST2. */
12664 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12665 {
12666 if (b_bits == 0x09 || b_bits == 0x08)
12667 bf_regs = 1;
12668 else if (b_bits == 0x03)
12669 bf_regs = 2;
12670 else
12671 bf_regs = 0;
12672
12673 for (index_r = 0; index_r < bf_regs; index_r++)
12674 for (index_e = 0; index_e < f_elem; index_e++)
12675 {
12676 for (loop_t = 0; loop_t < 2; loop_t++)
12677 {
12678 record_buf_mem[index_m++] = f_ebytes;
12679 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12680 thumb2_insn_r->mem_rec_count += 1;
12681 }
12682 address = address + (2 * f_ebytes);
12683 }
12684 }
12685 /* Handle VST3. */
12686 else if ((b_bits & 0x0e) == 0x04)
12687 {
12688 for (index_e = 0; index_e < f_elem; index_e++)
12689 {
12690 for (loop_t = 0; loop_t < 3; loop_t++)
12691 {
12692 record_buf_mem[index_m++] = f_ebytes;
12693 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12694 thumb2_insn_r->mem_rec_count += 1;
12695 }
12696 address = address + (3 * f_ebytes);
12697 }
12698 }
12699 /* Handle VST4. */
12700 else if (!(b_bits & 0x0e))
12701 {
12702 for (index_e = 0; index_e < f_elem; index_e++)
12703 {
12704 for (loop_t = 0; loop_t < 4; loop_t++)
12705 {
12706 record_buf_mem[index_m++] = f_ebytes;
12707 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12708 thumb2_insn_r->mem_rec_count += 1;
12709 }
12710 address = address + (4 * f_ebytes);
12711 }
12712 }
12713 }
12714 else
12715 {
12716 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12717
12718 if (bft_size == 0x00)
12719 f_ebytes = 1;
12720 else if (bft_size == 0x01)
12721 f_ebytes = 2;
12722 else if (bft_size == 0x02)
12723 f_ebytes = 4;
12724 else
12725 f_ebytes = 0;
12726
12727 /* Handle VST1. */
12728 if (!(b_bits & 0x0b) || b_bits == 0x08)
12729 thumb2_insn_r->mem_rec_count = 1;
12730 /* Handle VST2. */
12731 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12732 thumb2_insn_r->mem_rec_count = 2;
12733 /* Handle VST3. */
12734 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12735 thumb2_insn_r->mem_rec_count = 3;
12736 /* Handle VST4. */
12737 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12738 thumb2_insn_r->mem_rec_count = 4;
12739
12740 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12741 {
12742 record_buf_mem[index_m] = f_ebytes;
12743 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12744 }
12745 }
12746 }
12747 else
12748 {
12749 if (!a_bit)
12750 {
12751 /* Handle VLD1. */
12752 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12753 thumb2_insn_r->reg_rec_count = 1;
12754 /* Handle VLD2. */
12755 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12756 thumb2_insn_r->reg_rec_count = 2;
12757 /* Handle VLD3. */
12758 else if ((b_bits & 0x0e) == 0x04)
12759 thumb2_insn_r->reg_rec_count = 3;
12760 /* Handle VLD4. */
12761 else if (!(b_bits & 0x0e))
12762 thumb2_insn_r->reg_rec_count = 4;
12763 }
12764 else
12765 {
12766 /* Handle VLD1. */
12767 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12768 thumb2_insn_r->reg_rec_count = 1;
12769 /* Handle VLD2. */
12770 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12771 thumb2_insn_r->reg_rec_count = 2;
12772 /* Handle VLD3. */
12773 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12774 thumb2_insn_r->reg_rec_count = 3;
12775 /* Handle VLD4. */
12776 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12777 thumb2_insn_r->reg_rec_count = 4;
12778
12779 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12780 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12781 }
12782 }
12783
12784 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12785 {
12786 record_buf[index_r] = reg_rn;
12787 thumb2_insn_r->reg_rec_count += 1;
12788 }
12789
12790 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12791 record_buf);
12792 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12793 record_buf_mem);
12794 return 0;
12795 }
12796
/* Decodes thumb2 instruction type and invokes its record handler.
   Returns the handler's result, or -1 when no pattern matches.
   NOTE(review): the return type is unsigned int, so -1 comes back as
   0xffffffff; the caller (decode_insn) only compares against
   ARM_RECORD_SUCCESS, which still works -- consider changing to int.  */

static unsigned int
thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
{
  uint32_t op, op1, op2;

  /* Top-level opcode fields of the (half-word swapped) instruction.  */
  op = bit (thumb2_insn_r->arm_insn, 15);
  op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 26);

  if (op1 == 0x01)
    {
      if (!(op2 & 0x64 ))
        {
          /* Load/store multiple instruction.  */
          return thumb2_record_ld_st_multiple (thumb2_insn_r);
        }
      else if ((op2 & 0x64) == 0x4)
        {
          /* Load/store (dual/exclusive) and table branch instruction.  */
          return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
        }
      else if ((op2 & 0x60) == 0x20)
        {
          /* Data-processing (shifted register).  */
          return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
        }
      else if (op2 & 0x40)
        {
          /* Co-processor instructions.  */
          return thumb2_record_coproc_insn (thumb2_insn_r);
        }
    }
  else if (op1 == 0x02)
    {
      if (op)
        {
          /* Branches and miscellaneous control instructions.  */
          return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
        }
      else if (op2 & 0x20)
        {
          /* Data-processing (plain binary immediate) instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else
        {
          /* Data-processing (modified immediate).  */
          return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
        }
    }
  else if (op1 == 0x03)
    {
      if (!(op2 & 0x71 ))
        {
          /* Store single data item.  */
          return thumb2_record_str_single_data (thumb2_insn_r);
        }
      else if (!((op2 & 0x71) ^ 0x10))
        {
          /* Advanced SIMD or structure load/store instructions.  */
          return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x01))
        {
          /* Load byte, memory hints instruction.  */
          return thumb2_record_ld_mem_hints (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x03))
        {
          /* Load halfword, memory hints instruction.  */
          return thumb2_record_ld_mem_hints (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x05))
        {
          /* Load word instruction.  */
          return thumb2_record_ld_word (thumb2_insn_r);
        }
      else if (!((op2 & 0x70) ^ 0x20))
        {
          /* Data-processing (register) instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else if (!((op2 & 0x78) ^ 0x30))
        {
          /* Multiply, multiply accumulate, abs diff instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else if (!((op2 & 0x78) ^ 0x38))
        {
          /* Long multiply, long multiply accumulate, and divide.  */
          return thumb2_record_lmul_lmla_div (thumb2_insn_r);
        }
      else if (op2 & 0x40)
        {
          /* Co-processor instructions.  */
          return thumb2_record_coproc_insn (thumb2_insn_r);
        }
    }

  /* Nothing matched: signal failure to the caller.  */
  return -1;
}
12900
12901 namespace {
12902 /* Abstract memory reader. */
12903
12904 class abstract_memory_reader
12905 {
12906 public:
12907 /* Read LEN bytes of target memory at address MEMADDR, placing the
12908 results in GDB's memory at BUF. Return true on success. */
12909
12910 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
12911 };
12912
12913 /* Instruction reader from real target. */
12914
12915 class instruction_reader : public abstract_memory_reader
12916 {
12917 public:
12918 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len)
12919 {
12920 if (target_read_memory (memaddr, buf, len))
12921 return false;
12922 else
12923 return true;
12924 }
12925 };
12926
12927 } // namespace
12928
/* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on
   success and positive val on failure.  INSN_SIZE is in bytes (2 for a
   Thumb half-word, 4 for ARM/Thumb-2 -- see the callers).  */

static int
extract_arm_insn (abstract_memory_reader& reader,
                  insn_decode_record *insn_record, uint32_t insn_size)
{
  gdb_byte buf[insn_size];

  memset (&buf[0], 0, insn_size);

  if (!reader.read (insn_record->this_addr, buf, insn_size))
    return 1;
  /* Decode using the code byte order of the target architecture.  */
  insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
                          insn_size,
                          gdbarch_byte_order_for_code (insn_record->gdbarch));
  return 0;
}
12947
/* Signature shared by all ARM/Thumb record handlers: each takes the
   decoded instruction record and returns an ARM_RECORD_* status.  */
typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12949
12950 /* Decode arm/thumb insn depending on condition cods and opcodes; and
12951 dispatch it. */
12952
12953 static int
12954 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
12955 record_type_t record_type, uint32_t insn_size)
12956 {
12957
12958 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
12959 instruction. */
12960 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12961 {
12962 arm_record_data_proc_misc_ld_str, /* 000. */
12963 arm_record_data_proc_imm, /* 001. */
12964 arm_record_ld_st_imm_offset, /* 010. */
12965 arm_record_ld_st_reg_offset, /* 011. */
12966 arm_record_ld_st_multiple, /* 100. */
12967 arm_record_b_bl, /* 101. */
12968 arm_record_asimd_vfp_coproc, /* 110. */
12969 arm_record_coproc_data_proc /* 111. */
12970 };
12971
12972 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
12973 instruction. */
12974 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12975 { \
12976 thumb_record_shift_add_sub, /* 000. */
12977 thumb_record_add_sub_cmp_mov, /* 001. */
12978 thumb_record_ld_st_reg_offset, /* 010. */
12979 thumb_record_ld_st_imm_offset, /* 011. */
12980 thumb_record_ld_st_stack, /* 100. */
12981 thumb_record_misc, /* 101. */
12982 thumb_record_ldm_stm_swi, /* 110. */
12983 thumb_record_branch /* 111. */
12984 };
12985
12986 uint32_t ret = 0; /* return value: negative:failure 0:success. */
12987 uint32_t insn_id = 0;
12988
12989 if (extract_arm_insn (reader, arm_record, insn_size))
12990 {
12991 if (record_debug)
12992 {
12993 printf_unfiltered (_("Process record: error reading memory at "
12994 "addr %s len = %d.\n"),
12995 paddress (arm_record->gdbarch,
12996 arm_record->this_addr), insn_size);
12997 }
12998 return -1;
12999 }
13000 else if (ARM_RECORD == record_type)
13001 {
13002 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13003 insn_id = bits (arm_record->arm_insn, 25, 27);
13004
13005 if (arm_record->cond == 0xf)
13006 ret = arm_record_extension_space (arm_record);
13007 else
13008 {
13009 /* If this insn has fallen into extension space
13010 then we need not decode it anymore. */
13011 ret = arm_handle_insn[insn_id] (arm_record);
13012 }
13013 if (ret != ARM_RECORD_SUCCESS)
13014 {
13015 arm_record_unsupported_insn (arm_record);
13016 ret = -1;
13017 }
13018 }
13019 else if (THUMB_RECORD == record_type)
13020 {
13021 /* As thumb does not have condition codes, we set negative. */
13022 arm_record->cond = -1;
13023 insn_id = bits (arm_record->arm_insn, 13, 15);
13024 ret = thumb_handle_insn[insn_id] (arm_record);
13025 if (ret != ARM_RECORD_SUCCESS)
13026 {
13027 arm_record_unsupported_insn (arm_record);
13028 ret = -1;
13029 }
13030 }
13031 else if (THUMB2_RECORD == record_type)
13032 {
13033 /* As thumb does not have condition codes, we set negative. */
13034 arm_record->cond = -1;
13035
13036 /* Swap first half of 32bit thumb instruction with second half. */
13037 arm_record->arm_insn
13038 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13039
13040 ret = thumb2_record_decode_insn_handler (arm_record);
13041
13042 if (ret != ARM_RECORD_SUCCESS)
13043 {
13044 arm_record_unsupported_insn (arm_record);
13045 ret = -1;
13046 }
13047 }
13048 else
13049 {
13050 /* Throw assertion. */
13051 gdb_assert_not_reached ("not a valid instruction, could not decode");
13052 }
13053
13054 return ret;
13055 }
13056
13057 #if GDB_SELF_TEST
13058 namespace selftests {
13059
/* Provide both 16-bit and 32-bit thumb instructions.  */

class instruction_reader_thumb : public abstract_memory_reader
{
public:
  /* ENDIAN is the code byte order to encode with; INSNS is the array of
     16-bit half-words backing the fake target memory.  */
  template<size_t SIZE>
  instruction_reader_thumb (enum bfd_endian endian,
                            const uint16_t (&insns)[SIZE])
    : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
  {}

  /* Serve a 2- or 4-byte read; MEMADDR is a byte address, so it indexes
     the half-word array at MEMADDR / 2.  */
  bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len)
  {
    SELF_CHECK (len == 4 || len == 2);
    SELF_CHECK (memaddr % 2 == 0);
    SELF_CHECK ((memaddr / 2) < m_insns_size);

    store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
    if (len == 4)
      {
        /* A 32-bit Thumb-2 insn spans two consecutive half-words.  */
        store_unsigned_integer (&buf[2], 2, m_endian,
                                m_insns[memaddr / 2 + 1]);
      }
    return true;
  }

private:
  enum bfd_endian m_endian;    /* Byte order used when encoding.  */
  const uint16_t *m_insns;     /* Backing instruction half-words.  */
  size_t m_insns_size;         /* Number of half-words in m_insns.  */
};
13091
/* Self test exercising decode_insn on canned 16-bit Thumb and 32-bit
   Thumb-2 instructions, checking the recorded register lists.  */

static void
arm_record_test (void)
{
  struct gdbarch_info info;
  gdbarch_info_init (&info);
  info.bfd_arch_info = bfd_scan_arch ("arm");

  struct gdbarch *gdbarch = gdbarch_find_by_info (info);

  SELF_CHECK (gdbarch != NULL);

  /* 16-bit Thumb instructions.  */
  {
    insn_decode_record arm_record;

    memset (&arm_record, 0, sizeof (insn_decode_record));
    arm_record.gdbarch = gdbarch;

    static const uint16_t insns[] = {
      /* db b2	uxtb	r3, r3 */
      0xb2db,
      /* cd 58	ldr	r5, [r1, r3] */
      0x58cd,
    };

    enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
    instruction_reader_thumb reader (endian, insns);
    int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
                           THUMB_INSN_SIZE_BYTES);

    /* uxtb writes only r3.  */
    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 3);

    /* ldr writes only r5.  */
    arm_record.this_addr += 2;
    ret = decode_insn (reader, &arm_record, THUMB_RECORD,
                       THUMB_INSN_SIZE_BYTES);

    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 5);
  }

  /* 32-bit Thumb-2 instructions.  */
  {
    insn_decode_record arm_record;

    memset (&arm_record, 0, sizeof (insn_decode_record));
    arm_record.gdbarch = gdbarch;

    static const uint16_t insns[] = {
      /* 1d ee 70 7f	 mrc	15, 0, r7, cr13, cr0, {3} */
      0xee1d, 0x7f70,
    };

    enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
    instruction_reader_thumb reader (endian, insns);
    int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
                           THUMB2_INSN_SIZE_BYTES);

    /* mrc writes only r7.  */
    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 7);
  }
}
13160 } // namespace selftests
13161 #endif /* GDB_SELF_TEST */
13162
13163 /* Cleans up local record registers and memory allocations. */
13164
13165 static void
13166 deallocate_reg_mem (insn_decode_record *record)
13167 {
13168 xfree (record->arm_regs);
13169 xfree (record->arm_mems);
13170 }
13171
13172
/* Parse the current instruction and record the values of the registers and
   memory that will be changed in current instruction to record_arch_list".
   Return -1 if something is wrong.  */

int
arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
                    CORE_ADDR insn_addr)
{

  uint32_t no_of_rec = 0;
  uint32_t ret = 0;  /* return value: -1:record failure ;  0:success  */
  ULONGEST t_bit = 0, insn_id = 0;

  ULONGEST u_regval = 0;

  insn_decode_record arm_record;

  memset (&arm_record, 0, sizeof (insn_decode_record));
  arm_record.regcache = regcache;
  arm_record.this_addr = insn_addr;
  arm_record.gdbarch = gdbarch;


  if (record_debug > 1)
    {
      fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
                          "addr = %s\n",
      paddress (gdbarch, arm_record.this_addr));
    }

  /* Fetch the first half-word only; it is enough to classify the insn,
     and decode_insn re-reads at the correct size afterwards.  */
  instruction_reader reader;
  if (extract_arm_insn (reader, &arm_record, 2))
    {
      if (record_debug)
        {
          printf_unfiltered (_("Process record: error reading memory at "
                               "addr %s len = %d.\n"),
                             paddress (arm_record.gdbarch,
                                       arm_record.this_addr), 2);
        }
      return -1;
    }

  /* Check the insn, whether it is thumb or arm one.  */

  t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
  regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);


  if (!(u_regval & t_bit))
    {
      /* We are decoding arm insn.  */
      ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
    }
  else
    {
      insn_id = bits (arm_record.arm_insn, 11, 15);
      /* is it thumb2 insn?  (The 0x1D/0x1E/0x1F prefixes mark the first
	 half-word of a 32-bit Thumb-2 instruction.)  */
      if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
        {
          ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
                             THUMB2_INSN_SIZE_BYTES);
        }
      else
        {
          /* We are decoding thumb insn.  */
          ret = decode_insn (reader, &arm_record, THUMB_RECORD,
                             THUMB_INSN_SIZE_BYTES);
        }
    }

  if (0 == ret)
    {
      /* Record registers.  The PC is always recorded, independent of
	 what the handler collected.  */
      record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
      if (arm_record.arm_regs)
        {
          for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
            {
              if (record_full_arch_list_add_reg
                  (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
                ret = -1;
            }
        }
      /* Record memories.  Each entry carries an address and a length.  */
      if (arm_record.arm_mems)
        {
          for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
            {
              if (record_full_arch_list_add_mem
                  ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
                   arm_record.arm_mems[no_of_rec].len))
                ret = -1;
            }
        }

      if (record_full_arch_list_add_end ())
        ret = -1;
    }


  deallocate_reg_mem (&arm_record);

  return ret;
}