1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2014 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "gdb/sim-arm.h"
52 #include "coff/internal.h"
58 #include "record-full.h"
60 #include "features/arm-with-m.c"
61 #include "features/arm-with-m-fpa-layout.c"
62 #include "features/arm-with-m-vfp-d16.c"
63 #include "features/arm-with-iwmmxt.c"
64 #include "features/arm-with-vfpv2.c"
65 #include "features/arm-with-vfpv3.c"
66 #include "features/arm-with-neon.c"
70 /* Macros for setting and testing a bit in a minimal symbol that marks
71 it as Thumb function. The MSB of the minimal symbol's "info" field
72 is used for this purpose.
74 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
75 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
/* NOTE(review): both macros expand to MSYMBOL_TARGET_FLAG_1, declared
   elsewhere; presumably that flag maps onto the "info" MSB described
   above -- confirm against the minimal-symbol definition.  */
77 #define MSYMBOL_SET_SPECIAL(msym) \
78 MSYMBOL_TARGET_FLAG_1 (msym) = 1
80 #define MSYMBOL_IS_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym)
83 /* Per-objfile data used for mapping symbols. */
84 static const struct objfile_data
*arm_objfile_data_key
;
86 struct arm_mapping_symbol
91 typedef struct arm_mapping_symbol arm_mapping_symbol_s
;
92 DEF_VEC_O(arm_mapping_symbol_s
);
94 struct arm_per_objfile
96 VEC(arm_mapping_symbol_s
) **section_maps
;
99 /* The list of available "set arm ..." and "show arm ..." commands. */
100 static struct cmd_list_element
*setarmcmdlist
= NULL
;
101 static struct cmd_list_element
*showarmcmdlist
= NULL
;
103 /* The type of floating-point to use. Keep this in sync with enum
104 arm_float_model, and the help string in _initialize_arm_tdep. */
105 static const char *const fp_model_strings
[] =
115 /* A variable that can be configured by the user. */
116 static enum arm_float_model arm_fp_model
= ARM_FLOAT_AUTO
;
117 static const char *current_fp_model
= "auto";
119 /* The ABI to use. Keep this in sync with arm_abi_kind. */
120 static const char *const arm_abi_strings
[] =
128 /* A variable that can be configured by the user. */
129 static enum arm_abi_kind arm_abi_global
= ARM_ABI_AUTO
;
130 static const char *arm_abi_string
= "auto";
132 /* The execution mode to assume. */
133 static const char *const arm_mode_strings
[] =
141 static const char *arm_fallback_mode_string
= "auto";
142 static const char *arm_force_mode_string
= "auto";
144 /* Internal override of the execution mode. -1 means no override,
145 0 means override to ARM mode, 1 means override to Thumb mode.
146 The effect is the same as if arm_force_mode has been set by the
147 user (except the internal override has precedence over a user's
148 arm_force_mode override). */
149 static int arm_override_mode
= -1;
151 /* Number of different reg name sets (options). */
152 static int num_disassembly_options
;
154 /* The standard register names, and all the valid aliases for them. Note
155 that `fp', `sp' and `pc' are not added in this alias list, because they
156 have been added as builtin user registers in
157 std-regs.c:_initialize_frame_reg. */
162 } arm_register_aliases
[] = {
163 /* Basic register numbers. */
180 /* Synonyms (argument and variable registers). */
193 /* Other platform-specific names for r9. */
199 /* Names used by GCC (not listed in the ARM EABI). */
201 /* A special name from the older ATPCS. */
205 static const char *const arm_register_names
[] =
206 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
207 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
208 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
209 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
210 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
211 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
212 "fps", "cpsr" }; /* 24 25 */
214 /* Valid register name styles. */
215 static const char **valid_disassembly_styles
;
217 /* Disassembly style to use. Default to "std" register names. */
218 static const char *disassembly_style
;
220 /* This is used to keep the bfd arch_info in sync with the disassembly
222 static void set_disassembly_style_sfunc(char *, int,
223 struct cmd_list_element
*);
224 static void set_disassembly_style (void);
226 static void convert_from_extended (const struct floatformat
*, const void *,
228 static void convert_to_extended (const struct floatformat
*, void *,
231 static enum register_status
arm_neon_quad_read (struct gdbarch
*gdbarch
,
232 struct regcache
*regcache
,
233 int regnum
, gdb_byte
*buf
);
234 static void arm_neon_quad_write (struct gdbarch
*gdbarch
,
235 struct regcache
*regcache
,
236 int regnum
, const gdb_byte
*buf
);
238 static int thumb_insn_size (unsigned short inst1
);
240 struct arm_prologue_cache
242 /* The stack pointer at the time this frame was created; i.e. the
243 caller's stack pointer when this function was called. It is used
244 to identify this frame. */
247 /* The frame base for this frame is just prev_sp - frame size.
248 FRAMESIZE is the distance from the frame pointer to the
249 initial stack pointer. */
253 /* The register used to hold the frame pointer for this frame. */
256 /* Saved register offsets. */
257 struct trad_frame_saved_reg
*saved_regs
;
260 static CORE_ADDR
arm_analyze_prologue (struct gdbarch
*gdbarch
,
261 CORE_ADDR prologue_start
,
262 CORE_ADDR prologue_end
,
263 struct arm_prologue_cache
*cache
);
265 /* Architecture version for displaced stepping. This affects the behaviour of
266 certain instructions, and really should not be hard-wired. */
268 #define DISPLACED_STEPPING_ARCH_VERSION 5
270 /* Addresses for calling Thumb functions have the bit 0 set.
271 Here are some macros to test, set, or clear bit 0 of addresses. */
/* Nonzero iff bit 0 of ADDR is set, i.e. ADDR is flagged as Thumb.  */
272 #define IS_THUMB_ADDR(addr) ((addr) & 1)
/* Flag ADDR as a Thumb address by setting bit 0.  */
273 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
/* Strip the Thumb flag (bit 0) from ADDR.  */
274 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
276 /* Set to true if the 32-bit mode is in use. */
280 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
283 arm_psr_thumb_bit (struct gdbarch
*gdbarch
)
285 if (gdbarch_tdep (gdbarch
)->is_m
)
291 /* Determine if FRAME is executing in Thumb mode. */
294 arm_frame_is_thumb (struct frame_info
*frame
)
297 ULONGEST t_bit
= arm_psr_thumb_bit (get_frame_arch (frame
));
299 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
300 directly (from a signal frame or dummy frame) or by interpreting
301 the saved LR (from a prologue or DWARF frame). So consult it and
302 trust the unwinders. */
303 cpsr
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
305 return (cpsr
& t_bit
) != 0;
308 /* Callback for VEC_lower_bound. */
311 arm_compare_mapping_symbols (const struct arm_mapping_symbol
*lhs
,
312 const struct arm_mapping_symbol
*rhs
)
314 return lhs
->value
< rhs
->value
;
317 /* Search for the mapping symbol covering MEMADDR. If one is found,
318 return its type. Otherwise, return 0. If START is non-NULL,
319 set *START to the location of the mapping symbol. */
322 arm_find_mapping_symbol (CORE_ADDR memaddr
, CORE_ADDR
*start
)
324 struct obj_section
*sec
;
326 /* If there are mapping symbols, consult them. */
327 sec
= find_pc_section (memaddr
);
330 struct arm_per_objfile
*data
;
331 VEC(arm_mapping_symbol_s
) *map
;
332 struct arm_mapping_symbol map_key
= { memaddr
- obj_section_addr (sec
),
336 data
= objfile_data (sec
->objfile
, arm_objfile_data_key
);
339 map
= data
->section_maps
[sec
->the_bfd_section
->index
];
340 if (!VEC_empty (arm_mapping_symbol_s
, map
))
342 struct arm_mapping_symbol
*map_sym
;
344 idx
= VEC_lower_bound (arm_mapping_symbol_s
, map
, &map_key
,
345 arm_compare_mapping_symbols
);
347 /* VEC_lower_bound finds the earliest ordered insertion
348 point. If the following symbol starts at this exact
349 address, we use that; otherwise, the preceding
350 mapping symbol covers this address. */
351 if (idx
< VEC_length (arm_mapping_symbol_s
, map
))
353 map_sym
= VEC_index (arm_mapping_symbol_s
, map
, idx
);
354 if (map_sym
->value
== map_key
.value
)
357 *start
= map_sym
->value
+ obj_section_addr (sec
);
358 return map_sym
->type
;
364 map_sym
= VEC_index (arm_mapping_symbol_s
, map
, idx
- 1);
366 *start
= map_sym
->value
+ obj_section_addr (sec
);
367 return map_sym
->type
;
376 /* Determine if the program counter specified in MEMADDR is in a Thumb
377 function. This function should be called for addresses unrelated to
378 any executing frame; otherwise, prefer arm_frame_is_thumb. */
381 arm_pc_is_thumb (struct gdbarch
*gdbarch
, CORE_ADDR memaddr
)
383 struct bound_minimal_symbol sym
;
385 struct displaced_step_closure
* dsc
386 = get_displaced_step_closure_by_addr(memaddr
);
388 /* If checking the mode of displaced instruction in copy area, the mode
389 should be determined by instruction on the original address. */
393 fprintf_unfiltered (gdb_stdlog
,
394 "displaced: check mode of %.8lx instead of %.8lx\n",
395 (unsigned long) dsc
->insn_addr
,
396 (unsigned long) memaddr
);
397 memaddr
= dsc
->insn_addr
;
400 /* If bit 0 of the address is set, assume this is a Thumb address. */
401 if (IS_THUMB_ADDR (memaddr
))
404 /* Respect internal mode override if active. */
405 if (arm_override_mode
!= -1)
406 return arm_override_mode
;
408 /* If the user wants to override the symbol table, let him. */
409 if (strcmp (arm_force_mode_string
, "arm") == 0)
411 if (strcmp (arm_force_mode_string
, "thumb") == 0)
414 /* ARM v6-M and v7-M are always in Thumb mode. */
415 if (gdbarch_tdep (gdbarch
)->is_m
)
418 /* If there are mapping symbols, consult them. */
419 type
= arm_find_mapping_symbol (memaddr
, NULL
);
423 /* Thumb functions have a "special" bit set in minimal symbols. */
424 sym
= lookup_minimal_symbol_by_pc (memaddr
);
426 return (MSYMBOL_IS_SPECIAL (sym
.minsym
));
428 /* If the user wants to override the fallback mode, let them. */
429 if (strcmp (arm_fallback_mode_string
, "arm") == 0)
431 if (strcmp (arm_fallback_mode_string
, "thumb") == 0)
434 /* If we couldn't find any symbol, but we're talking to a running
435 target, then trust the current value of $cpsr. This lets
436 "display/i $pc" always show the correct mode (though if there is
437 a symbol table we will not reach here, so it still may not be
438 displayed in the mode it will be executed). */
439 if (target_has_registers
)
440 return arm_frame_is_thumb (get_current_frame ());
442 /* Otherwise we're out of luck; we assume ARM. */
446 /* Remove useless bits from addresses in a running program. */
448 arm_addr_bits_remove (struct gdbarch
*gdbarch
, CORE_ADDR val
)
450 /* On M-profile devices, do not strip the low bit from EXC_RETURN
451 (the magic exception return address). */
452 if (gdbarch_tdep (gdbarch
)->is_m
453 && (val
& 0xfffffff0) == 0xfffffff0)
457 return UNMAKE_THUMB_ADDR (val
);
459 return (val
& 0x03fffffc);
462 /* Return 1 if PC is the start of a compiler helper function which
463 can be safely ignored during prologue skipping. IS_THUMB is true
464 if the function is known to be a Thumb function due to the way it
467 skip_prologue_function (struct gdbarch
*gdbarch
, CORE_ADDR pc
, int is_thumb
)
469 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
470 struct bound_minimal_symbol msym
;
472 msym
= lookup_minimal_symbol_by_pc (pc
);
473 if (msym
.minsym
!= NULL
474 && BMSYMBOL_VALUE_ADDRESS (msym
) == pc
475 && MSYMBOL_LINKAGE_NAME (msym
.minsym
) != NULL
)
477 const char *name
= MSYMBOL_LINKAGE_NAME (msym
.minsym
);
479 /* The GNU linker's Thumb call stub to foo is named
481 if (strstr (name
, "_from_thumb") != NULL
)
484 /* On soft-float targets, __truncdfsf2 is called to convert promoted
485 arguments to their argument types in non-prototyped
487 if (strncmp (name
, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
489 if (strncmp (name
, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
492 /* Internal functions related to thread-local storage. */
493 if (strncmp (name
, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
495 if (strncmp (name
, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
500 /* If we run against a stripped glibc, we may be unable to identify
501 special functions by name. Check for one important case,
502 __aeabi_read_tp, by comparing the *code* against the default
503 implementation (this is hand-written ARM assembler in glibc). */
506 && read_memory_unsigned_integer (pc
, 4, byte_order_for_code
)
507 == 0xe3e00a0f /* mov r0, #0xffff0fff */
508 && read_memory_unsigned_integer (pc
+ 4, 4, byte_order_for_code
)
509 == 0xe240f01f) /* sub pc, r0, #31 */
/* Support routines for instruction parsing.  */

/* Mask of the low (X + 1) bits.  NOTE(review): 1L << 32 is undefined
   behavior when X == 31 and long is 32 bits; visible callers pass
   X <= 30 after subtraction, but 1UL would be safer -- confirm before
   changing, since sbits relies on the signed complement below.  */
#define submask(x) ((1L << ((x) + 1)) - 1)

/* Bit ST of OBJ.  */
#define bit(obj,st) (((obj) >> (st)) & 1)

/* Bits ST..FN (inclusive) of OBJ, right-justified.  */
#define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))

/* Bits ST..FN of OBJ, sign-extended into a long using bit FN as the
   sign.  The FN - ST argument to submask is now parenthesized, matching
   the bits macro above, so expression arguments (e.g. ST == y - 1)
   expand correctly.  */
#define sbits(obj,st,fn) \
  ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask ((fn) - (st)))))

/* Target of an ARM branch: ADDR + 8 (pipeline offset) plus the
   sign-extended 24-bit offset field scaled by 4.  */
#define BranchDest(addr,instr) \
  ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
525 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
526 the first 16-bit of instruction, and INSN2 is the second 16-bit of
   the instruction.  The 16-bit immediate is assembled as
   imm4:i:imm3:imm8, i.e. insn1 bits 3:0, insn1 bit 10, insn2 bits
   14:12, and insn2 bits 7:0, from most to least significant.  */
528 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
529 ((bits ((insn1), 0, 3) << 12) \
530 | (bits ((insn1), 10, 10) << 11) \
531 | (bits ((insn2), 12, 14) << 8) \
532 | bits ((insn2), 0, 7))
534 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
535 the 32-bit instruction. */
/* The 16-bit immediate is assembled as imm4:imm12, i.e. insn bits
   19:16 followed by insn bits 11:0.  */
536 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
537 ((bits ((insn), 16, 19) << 12) \
538 | bits ((insn), 0, 11))
540 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
543 thumb_expand_immediate (unsigned int imm
)
545 unsigned int count
= imm
>> 7;
553 return (imm
& 0xff) | ((imm
& 0xff) << 16);
555 return ((imm
& 0xff) << 8) | ((imm
& 0xff) << 24);
557 return (imm
& 0xff) | ((imm
& 0xff) << 8)
558 | ((imm
& 0xff) << 16) | ((imm
& 0xff) << 24);
561 return (0x80 | (imm
& 0x7f)) << (32 - count
);
564 /* Return 1 if the 16-bit Thumb instruction INST might change
565 control flow, 0 otherwise. */
568 thumb_instruction_changes_pc (unsigned short inst
)
570 if ((inst
& 0xff00) == 0xbd00) /* pop {rlist, pc} */
573 if ((inst
& 0xf000) == 0xd000) /* conditional branch */
576 if ((inst
& 0xf800) == 0xe000) /* unconditional branch */
579 if ((inst
& 0xff00) == 0x4700) /* bx REG, blx REG */
582 if ((inst
& 0xff87) == 0x4687) /* mov pc, REG */
585 if ((inst
& 0xf500) == 0xb100) /* CBNZ or CBZ. */
591 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
592 might change control flow, 0 otherwise. */
595 thumb2_instruction_changes_pc (unsigned short inst1
, unsigned short inst2
)
597 if ((inst1
& 0xf800) == 0xf000 && (inst2
& 0x8000) == 0x8000)
599 /* Branches and miscellaneous control instructions. */
601 if ((inst2
& 0x1000) != 0 || (inst2
& 0xd001) == 0xc000)
606 else if (inst1
== 0xf3de && (inst2
& 0xff00) == 0x3f00)
608 /* SUBS PC, LR, #imm8. */
611 else if ((inst2
& 0xd000) == 0x8000 && (inst1
& 0x0380) != 0x0380)
613 /* Conditional branch. */
620 if ((inst1
& 0xfe50) == 0xe810)
622 /* Load multiple or RFE. */
624 if (bit (inst1
, 7) && !bit (inst1
, 8))
630 else if (!bit (inst1
, 7) && bit (inst1
, 8))
636 else if (bit (inst1
, 7) && bit (inst1
, 8))
641 else if (!bit (inst1
, 7) && !bit (inst1
, 8))
650 if ((inst1
& 0xffef) == 0xea4f && (inst2
& 0xfff0) == 0x0f00)
652 /* MOV PC or MOVS PC. */
656 if ((inst1
& 0xff70) == 0xf850 && (inst2
& 0xf000) == 0xf000)
659 if (bits (inst1
, 0, 3) == 15)
665 if ((inst2
& 0x0fc0) == 0x0000)
671 if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf000)
677 if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf010)
686 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
687 epilogue, 0 otherwise. */
690 thumb_instruction_restores_sp (unsigned short insn
)
692 return (insn
== 0x46bd /* mov sp, r7 */
693 || (insn
& 0xff80) == 0xb000 /* add sp, imm */
694 || (insn
& 0xfe00) == 0xbc00); /* pop <registers> */
697 /* Analyze a Thumb prologue, looking for a recognizable stack frame
698 and frame pointer. Scan until we encounter a store that could
699 clobber the stack frame unexpectedly, or an unknown instruction.
700 Return the last address which is definitely safe to skip for an
701 initial breakpoint. */
704 thumb_analyze_prologue (struct gdbarch
*gdbarch
,
705 CORE_ADDR start
, CORE_ADDR limit
,
706 struct arm_prologue_cache
*cache
)
708 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
709 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
712 struct pv_area
*stack
;
713 struct cleanup
*back_to
;
715 CORE_ADDR unrecognized_pc
= 0;
717 for (i
= 0; i
< 16; i
++)
718 regs
[i
] = pv_register (i
, 0);
719 stack
= make_pv_area (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
720 back_to
= make_cleanup_free_pv_area (stack
);
722 while (start
< limit
)
726 insn
= read_memory_unsigned_integer (start
, 2, byte_order_for_code
);
728 if ((insn
& 0xfe00) == 0xb400) /* push { rlist } */
733 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
736 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
737 whether to save LR (R14). */
738 mask
= (insn
& 0xff) | ((insn
& 0x100) << 6);
740 /* Calculate offsets of saved R0-R7 and LR. */
741 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
742 if (mask
& (1 << regno
))
744 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
746 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
749 else if ((insn
& 0xff80) == 0xb080) /* sub sp, #imm */
751 offset
= (insn
& 0x7f) << 2; /* get scaled offset */
752 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
755 else if (thumb_instruction_restores_sp (insn
))
757 /* Don't scan past the epilogue. */
760 else if ((insn
& 0xf800) == 0xa800) /* add Rd, sp, #imm */
761 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[ARM_SP_REGNUM
],
763 else if ((insn
& 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
764 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
765 regs
[bits (insn
, 0, 2)] = pv_add_constant (regs
[bits (insn
, 3, 5)],
767 else if ((insn
& 0xf800) == 0x3000 /* add Rd, #imm */
768 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
769 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[bits (insn
, 8, 10)],
771 else if ((insn
& 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
772 && pv_is_register (regs
[bits (insn
, 6, 8)], ARM_SP_REGNUM
)
773 && pv_is_constant (regs
[bits (insn
, 3, 5)]))
774 regs
[bits (insn
, 0, 2)] = pv_add (regs
[bits (insn
, 3, 5)],
775 regs
[bits (insn
, 6, 8)]);
776 else if ((insn
& 0xff00) == 0x4400 /* add Rd, Rm */
777 && pv_is_constant (regs
[bits (insn
, 3, 6)]))
779 int rd
= (bit (insn
, 7) << 3) + bits (insn
, 0, 2);
780 int rm
= bits (insn
, 3, 6);
781 regs
[rd
] = pv_add (regs
[rd
], regs
[rm
]);
783 else if ((insn
& 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
785 int dst_reg
= (insn
& 0x7) + ((insn
& 0x80) >> 4);
786 int src_reg
= (insn
& 0x78) >> 3;
787 regs
[dst_reg
] = regs
[src_reg
];
789 else if ((insn
& 0xf800) == 0x9000) /* str rd, [sp, #off] */
791 /* Handle stores to the stack. Normally pushes are used,
792 but with GCC -mtpcs-frame, there may be other stores
793 in the prologue to create the frame. */
794 int regno
= (insn
>> 8) & 0x7;
797 offset
= (insn
& 0xff) << 2;
798 addr
= pv_add_constant (regs
[ARM_SP_REGNUM
], offset
);
800 if (pv_area_store_would_trash (stack
, addr
))
803 pv_area_store (stack
, addr
, 4, regs
[regno
]);
805 else if ((insn
& 0xf800) == 0x6000) /* str rd, [rn, #off] */
807 int rd
= bits (insn
, 0, 2);
808 int rn
= bits (insn
, 3, 5);
811 offset
= bits (insn
, 6, 10) << 2;
812 addr
= pv_add_constant (regs
[rn
], offset
);
814 if (pv_area_store_would_trash (stack
, addr
))
817 pv_area_store (stack
, addr
, 4, regs
[rd
]);
819 else if (((insn
& 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
820 || (insn
& 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
821 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
822 /* Ignore stores of argument registers to the stack. */
824 else if ((insn
& 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
825 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
826 /* Ignore block loads from the stack, potentially copying
827 parameters from memory. */
829 else if ((insn
& 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
830 || ((insn
& 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
831 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
)))
832 /* Similarly ignore single loads from the stack. */
834 else if ((insn
& 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
835 || (insn
& 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
836 /* Skip register copies, i.e. saves to another register
837 instead of the stack. */
839 else if ((insn
& 0xf800) == 0x2000) /* movs Rd, #imm */
840 /* Recognize constant loads; even with small stacks these are necessary
842 regs
[bits (insn
, 8, 10)] = pv_constant (bits (insn
, 0, 7));
843 else if ((insn
& 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
845 /* Constant pool loads, for the same reason. */
846 unsigned int constant
;
849 loc
= start
+ 4 + bits (insn
, 0, 7) * 4;
850 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
851 regs
[bits (insn
, 8, 10)] = pv_constant (constant
);
853 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instructions. */
855 unsigned short inst2
;
857 inst2
= read_memory_unsigned_integer (start
+ 2, 2,
858 byte_order_for_code
);
860 if ((insn
& 0xf800) == 0xf000 && (inst2
& 0xe800) == 0xe800)
862 /* BL, BLX. Allow some special function calls when
863 skipping the prologue; GCC generates these before
864 storing arguments to the stack. */
866 int j1
, j2
, imm1
, imm2
;
868 imm1
= sbits (insn
, 0, 10);
869 imm2
= bits (inst2
, 0, 10);
870 j1
= bit (inst2
, 13);
871 j2
= bit (inst2
, 11);
873 offset
= ((imm1
<< 12) + (imm2
<< 1));
874 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
876 nextpc
= start
+ 4 + offset
;
877 /* For BLX make sure to clear the low bits. */
878 if (bit (inst2
, 12) == 0)
879 nextpc
= nextpc
& 0xfffffffc;
881 if (!skip_prologue_function (gdbarch
, nextpc
,
882 bit (inst2
, 12) != 0))
886 else if ((insn
& 0xffd0) == 0xe900 /* stmdb Rn{!},
888 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
890 pv_t addr
= regs
[bits (insn
, 0, 3)];
893 if (pv_area_store_would_trash (stack
, addr
))
896 /* Calculate offsets of saved registers. */
897 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
898 if (inst2
& (1 << regno
))
900 addr
= pv_add_constant (addr
, -4);
901 pv_area_store (stack
, addr
, 4, regs
[regno
]);
905 regs
[bits (insn
, 0, 3)] = addr
;
908 else if ((insn
& 0xff50) == 0xe940 /* strd Rt, Rt2,
910 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
912 int regno1
= bits (inst2
, 12, 15);
913 int regno2
= bits (inst2
, 8, 11);
914 pv_t addr
= regs
[bits (insn
, 0, 3)];
916 offset
= inst2
& 0xff;
918 addr
= pv_add_constant (addr
, offset
);
920 addr
= pv_add_constant (addr
, -offset
);
922 if (pv_area_store_would_trash (stack
, addr
))
925 pv_area_store (stack
, addr
, 4, regs
[regno1
]);
926 pv_area_store (stack
, pv_add_constant (addr
, 4),
930 regs
[bits (insn
, 0, 3)] = addr
;
933 else if ((insn
& 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
934 && (inst2
& 0x0c00) == 0x0c00
935 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
937 int regno
= bits (inst2
, 12, 15);
938 pv_t addr
= regs
[bits (insn
, 0, 3)];
940 offset
= inst2
& 0xff;
942 addr
= pv_add_constant (addr
, offset
);
944 addr
= pv_add_constant (addr
, -offset
);
946 if (pv_area_store_would_trash (stack
, addr
))
949 pv_area_store (stack
, addr
, 4, regs
[regno
]);
952 regs
[bits (insn
, 0, 3)] = addr
;
955 else if ((insn
& 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
956 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
958 int regno
= bits (inst2
, 12, 15);
961 offset
= inst2
& 0xfff;
962 addr
= pv_add_constant (regs
[bits (insn
, 0, 3)], offset
);
964 if (pv_area_store_would_trash (stack
, addr
))
967 pv_area_store (stack
, addr
, 4, regs
[regno
]);
970 else if ((insn
& 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
971 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
972 /* Ignore stores of argument registers to the stack. */
975 else if ((insn
& 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
976 && (inst2
& 0x0d00) == 0x0c00
977 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
978 /* Ignore stores of argument registers to the stack. */
981 else if ((insn
& 0xffd0) == 0xe890 /* ldmia Rn[!],
983 && (inst2
& 0x8000) == 0x0000
984 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
985 /* Ignore block loads from the stack, potentially copying
986 parameters from memory. */
989 else if ((insn
& 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
991 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
992 /* Similarly ignore dual loads from the stack. */
995 else if ((insn
& 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
996 && (inst2
& 0x0d00) == 0x0c00
997 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
998 /* Similarly ignore single loads from the stack. */
1001 else if ((insn
& 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1002 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
1003 /* Similarly ignore single loads from the stack. */
1006 else if ((insn
& 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1007 && (inst2
& 0x8000) == 0x0000)
1009 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1010 | (bits (inst2
, 12, 14) << 8)
1011 | bits (inst2
, 0, 7));
1013 regs
[bits (inst2
, 8, 11)]
1014 = pv_add_constant (regs
[bits (insn
, 0, 3)],
1015 thumb_expand_immediate (imm
));
1018 else if ((insn
& 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1019 && (inst2
& 0x8000) == 0x0000)
1021 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1022 | (bits (inst2
, 12, 14) << 8)
1023 | bits (inst2
, 0, 7));
1025 regs
[bits (inst2
, 8, 11)]
1026 = pv_add_constant (regs
[bits (insn
, 0, 3)], imm
);
1029 else if ((insn
& 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1030 && (inst2
& 0x8000) == 0x0000)
1032 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1033 | (bits (inst2
, 12, 14) << 8)
1034 | bits (inst2
, 0, 7));
1036 regs
[bits (inst2
, 8, 11)]
1037 = pv_add_constant (regs
[bits (insn
, 0, 3)],
1038 - (CORE_ADDR
) thumb_expand_immediate (imm
));
1041 else if ((insn
& 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1042 && (inst2
& 0x8000) == 0x0000)
1044 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1045 | (bits (inst2
, 12, 14) << 8)
1046 | bits (inst2
, 0, 7));
1048 regs
[bits (inst2
, 8, 11)]
1049 = pv_add_constant (regs
[bits (insn
, 0, 3)], - (CORE_ADDR
) imm
);
1052 else if ((insn
& 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1054 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1055 | (bits (inst2
, 12, 14) << 8)
1056 | bits (inst2
, 0, 7));
1058 regs
[bits (inst2
, 8, 11)]
1059 = pv_constant (thumb_expand_immediate (imm
));
1062 else if ((insn
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1065 = EXTRACT_MOVW_MOVT_IMM_T (insn
, inst2
);
1067 regs
[bits (inst2
, 8, 11)] = pv_constant (imm
);
1070 else if (insn
== 0xea5f /* mov.w Rd,Rm */
1071 && (inst2
& 0xf0f0) == 0)
1073 int dst_reg
= (inst2
& 0x0f00) >> 8;
1074 int src_reg
= inst2
& 0xf;
1075 regs
[dst_reg
] = regs
[src_reg
];
1078 else if ((insn
& 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1080 /* Constant pool loads. */
1081 unsigned int constant
;
1084 offset
= bits (inst2
, 0, 11);
1086 loc
= start
+ 4 + offset
;
1088 loc
= start
+ 4 - offset
;
1090 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1091 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1094 else if ((insn
& 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1096 /* Constant pool loads. */
1097 unsigned int constant
;
1100 offset
= bits (inst2
, 0, 7) << 2;
1102 loc
= start
+ 4 + offset
;
1104 loc
= start
+ 4 - offset
;
1106 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1107 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1109 constant
= read_memory_unsigned_integer (loc
+ 4, 4, byte_order
);
1110 regs
[bits (inst2
, 8, 11)] = pv_constant (constant
);
1113 else if (thumb2_instruction_changes_pc (insn
, inst2
))
1115 /* Don't scan past anything that might change control flow. */
1120 /* The optimizer might shove anything into the prologue,
1121 so we just skip what we don't recognize. */
1122 unrecognized_pc
= start
;
1127 else if (thumb_instruction_changes_pc (insn
))
1129 /* Don't scan past anything that might change control flow. */
1134 /* The optimizer might shove anything into the prologue,
1135 so we just skip what we don't recognize. */
1136 unrecognized_pc
= start
;
1143 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1144 paddress (gdbarch
, start
));
1146 if (unrecognized_pc
== 0)
1147 unrecognized_pc
= start
;
1151 do_cleanups (back_to
);
1152 return unrecognized_pc
;
1155 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1157 /* Frame pointer is fp. Frame size is constant. */
1158 cache
->framereg
= ARM_FP_REGNUM
;
1159 cache
->framesize
= -regs
[ARM_FP_REGNUM
].k
;
1161 else if (pv_is_register (regs
[THUMB_FP_REGNUM
], ARM_SP_REGNUM
))
1163 /* Frame pointer is r7. Frame size is constant. */
1164 cache
->framereg
= THUMB_FP_REGNUM
;
1165 cache
->framesize
= -regs
[THUMB_FP_REGNUM
].k
;
1169 /* Try the stack pointer... this is a bit desperate. */
1170 cache
->framereg
= ARM_SP_REGNUM
;
1171 cache
->framesize
= -regs
[ARM_SP_REGNUM
].k
;
1174 for (i
= 0; i
< 16; i
++)
1175 if (pv_area_find_reg (stack
, gdbarch
, i
, &offset
))
1176 cache
->saved_regs
[i
].addr
= offset
;
1178 do_cleanups (back_to
);
1179 return unrecognized_pc
;
/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  Return the address of instruction after loading this
   symbol, set the dest register number to *BASEREG, and set the size of
   instructions for loading symbol in OFFSET.  Return 0 if instructions are
   not recognized.  */

static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
	{
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  /* PC-relative literal load: word-aligned PC plus scaled
	     8-bit immediate.  */
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  insn1
	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
	{
	  /* ARM-state PC reads as instruction address + 8.  */
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}
/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   first instruction after this sequence, otherwise, return original PC.

   On arm, this sequence of instructions is composed of mainly three steps,
     Step 1: load symbol __stack_chk_guard,
     Step 2: load from address of __stack_chk_guard,
     Step 3: store it to somewhere else.

   Usually, instructions on step 2 and step 3 are the same on various ARM
   architectures.  On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
   on step 3, it is also one instruction 'str Rx, [r7, #immd]'.  However,
   instructions in step 1 vary from different ARM architectures.  On ARMv7,
   they are,

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

   On ARMv5t, it is,

	ldr	Rn, .Label
	....
	.Lable:
	.word	__stack_chk_guard

   Since ldr/str is a very popular instruction, we can't use them as
   'fingerprint' or 'signature' of stack protector sequence.  Here we choose
   sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector cdoe sequence.  */

static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || strncmp (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
		  "__stack_chk_guard",
		  strlen ("__stack_chk_guard")) != 0)
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_memory_unsigned_integer (pc + offset + 2, 2,
					   byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_memory_unsigned_integer (pc + offset + 4,
					   4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }

  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on ARM mode.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
/* Advance the PC across any function entry prologue instructions to
   reach some "real" code.

   The APCS (ARM Procedure Call Standard) defines the following
   prologue:

   mov          ip, sp
   [stmfd       sp!, {a1,a2,a3,a4}]
   stmfd        sp!, {...,fp,ip,lr,pc}
   [stfe        f7, [sp, #-12]!]
   [stfe        f6, [sp, #-12]!]
   [stfe        f5, [sp, #-12]!]
   [stfe        f4, [sp, #-12]!]
   sub          fp, ip, #nn @@ nn == 20 or 4 depending on second insn.  */

static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long inst;
  CORE_ADDR skip_pc;
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      /* Skip the stack-protector guard-loading sequence, if present.  */
      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);

      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assuming the GNU tools.  */
      if (post_prologue_pc
	  && (cust == NULL
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || strncmp (COMPUNIT_PRODUCER (cust), "GNU ",
			  sizeof ("GNU ") - 1) == 0
	      || strncmp (COMPUNIT_PRODUCER (cust), "clang ",
			  sizeof ("clang ") - 1) == 0))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;		/* Magic.  */

  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);

  /* Otherwise scan ARM-mode instructions, skipping everything that
     looks like part of an APCS prologue.  */
  for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
    {
      inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);

      /* "mov ip, sp" is no longer a required part of the prologue.  */
      if (inst == 0xe1a0c00d)			/* mov ip, sp */
	continue;

      if ((inst & 0xfffff000) == 0xe28dc000)	/* add ip, sp #n */
	continue;

      if ((inst & 0xfffff000) == 0xe24dc000)	/* sub ip, sp #n */
	continue;

      /* Some prologues begin with "str lr, [sp, #-4]!".  */
      if (inst == 0xe52de004)			/* str lr, [sp, #-4]! */
	continue;

      if ((inst & 0xfffffff0) == 0xe92d0000)	/* stmfd sp!,{a1,a2,a3,a4} */
	continue;

      if ((inst & 0xfffff800) == 0xe92dd800)	/* stmfd sp!,{fp,ip,lr,pc} */
	continue;

      /* Any insns after this point may float into the code, if it makes
	 for better instruction scheduling, so we skip them only if we
	 find them, but still consider the function to be frame-ful.  */

      /* We may have either one sfmfd instruction here, or several stfe
	 insns, depending on the version of floating point code we
	 support.  */
      if ((inst & 0xffbf0fff) == 0xec2d0200)	/* sfmfd fn, <cnt>, [sp]! */
	continue;

      if ((inst & 0xffff8fff) == 0xed6d0103)	/* stfe fn, [sp, #-12]! */
	continue;

      if ((inst & 0xfffff000) == 0xe24cb000)	/* sub fp, ip, #nn */
	continue;

      if ((inst & 0xfffff000) == 0xe24dd000)	/* sub sp, sp, #nn */
	continue;

      if ((inst & 0xffffc000) == 0xe54b0000	/* strb r(0123),[r11,#-nn] */
	  || (inst & 0xffffc0f0) == 0xe14b00b0	/* strh r(0123),[r11,#-nn] */
	  || (inst & 0xffffc000) == 0xe50b0000)	/* str r(0123),[r11,#-nn] */
	continue;

      if ((inst & 0xffffc000) == 0xe5cd0000	/* strb r(0123),[sp,#nn] */
	  || (inst & 0xffffc0f0) == 0xe1cd00b0	/* strh r(0123),[sp,#nn] */
	  || (inst & 0xffffc000) == 0xe58d0000)	/* str r(0123),[sp,#nn] */
	continue;

      /* Un-recognized instruction; stop scanning.  */
      break;
    }

  return skip_pc;		/* End of prologue.  */
}
/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
   This function decodes a Thumb function prologue to determine:
     1) the size of the stack frame
     2) which registers are saved on it
     3) the offsets of saved regs
     4) the offset from the stack pointer to the frame pointer

   A typical Thumb function prologue would create this stack frame
   (offsets relative to FP)
     old SP ->	24  stack parameters
		20  LR
		16  R7
     R7 ->       0  local variables (16 bytes)
     SP ->     -12  additional stack space (12 bytes)
   The frame size would thus be 36 bytes, and the frame offset would be
   12 bytes.  The frame register is R7.

   The comments for thumb_skip_prolog() describe the algorithm we use
   to detect the end of the prolog.  */

static void
thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
		     CORE_ADDR block_addr, struct arm_prologue_cache *cache)
{
  CORE_ADDR prologue_start;
  CORE_ADDR prologue_end;

  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* See comment in arm_scan_prologue for an explanation of
	 this heuristic: cap the scan at 64 bytes past the function
	 start.  */
      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;
	}
    }
  else
    /* We're in the boondocks: we have no idea where the start of the
       function is.  */
    return;

  /* Never scan past the PC we are unwinding from.  */
  prologue_end = min (prologue_end, prev_pc);

  thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}
/* Return 1 if THIS_INSTR might change control flow, 0 otherwise.  */

static int
arm_instruction_changes_pc (uint32_t this_instr)
{
  if (bits (this_instr, 28, 31) == INST_NV)
    /* Unconditional instructions.  */
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	/* Branch with Link and change to Thumb.  */
	return 1;
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	return 0;
      default:
	return 0;
      }
  else
    switch (bits (this_instr, 25, 27))
      {
      case 0x0:
	if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
	  {
	    /* Multiplies and extra load/stores.  */
	    if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
	      /* Neither multiplies nor extension load/stores are allowed
		 to modify PC.  */
	      return 0;

	    /* Otherwise, miscellaneous instructions.  */

	    /* BX <reg>, BXJ <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff2
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      return 1;

	    /* Other miscellaneous instructions are unpredictable if they
	       modify PC.  */
	    return 0;
	  }
	/* Data processing instruction.  Fall through.  */

      case 0x1:
	if (bits (this_instr, 12, 15) == 15)
	  return 1;
	else
	  return 0;

      case 0x2:
      case 0x3:
	/* Media instructions and architecturally undefined instructions.  */
	if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
	  return 0;

	/* Stores.  */
	if (bit (this_instr, 20) == 0)
	  return 0;

	/* Loads.  */
	if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
	  return 1;
	else
	  return 0;

      case 0x4:
	/* Load/store multiple.  */
	if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
	  return 1;
	else
	  return 0;

      case 0x5:
	/* Branch and branch with link.  */
	return 1;

      case 0x6:
      case 0x7:
	/* Coprocessor transfers or SWIs can not affect PC.  */
	return 0;

      default:
	internal_error (__FILE__, __LINE__, _("bad value in switch"));
      }
}
/* Analyze an ARM mode prologue starting at PROLOGUE_START and
   continuing no further than PROLOGUE_END.  If CACHE is non-NULL,
   fill it in.  Return the first address not recognized as a prologue
   instruction.

   We recognize all the instructions typically found in ARM prologues,
   plus harmless instructions which can be skipped (either for analysis
   purposes, or a more restrictive set that can be skipped when finding
   the end of the prologue).  */

static CORE_ADDR
arm_analyze_prologue (struct gdbarch *gdbarch,
		      CORE_ADDR prologue_start, CORE_ADDR prologue_end,
		      struct arm_prologue_cache *cache)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int regno;
  CORE_ADDR offset, current_pc;
  pv_t regs[ARM_FPS_REGNUM];
  struct pv_area *stack;
  struct cleanup *back_to;
  CORE_ADDR unrecognized_pc = 0;

  /* Search the prologue looking for instructions that set up the
     frame pointer, adjust the stack pointer, and save registers.

     Be careful, however, and if it doesn't look like a prologue,
     don't try to scan it.  If, for instance, a frameless function
     begins with stmfd sp!, then we will tell ourselves there is
     a frame, which will confuse stack traceback, as well as "finish"
     and other operations that rely on a knowledge of the stack
     traceback.  */

  for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
    regs[regno] = pv_register (regno, 0);
  stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
  back_to = make_cleanup_free_pv_area (stack);

  for (current_pc = prologue_start;
       current_pc < prologue_end;
       current_pc += 4)
    {
      unsigned int insn
	= read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);

      if (insn == 0xe1a0c00d)		/* mov ip, sp */
	{
	  regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe2800000	/* add Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe2400000	/* sub Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
	  continue;
	}
      else if ((insn & 0xffff0fff) == 0xe52d0004)	/* str Rd,
							   [sp, #-4]! */
	{
	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
	  pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
			 regs[bits (insn, 12, 15)]);
	  continue;
	}
      else if ((insn & 0xffff0000) == 0xe92d0000)
	/* stmfd sp!, {..., fp, ip, lr, pc}
	   or
	   stmfd sp!, {a1, a2, a3, a4}  */
	{
	  int mask = insn & 0xffff;

	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;

	  /* Calculate offsets of saved registers.  */
	  for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
	      {
		regs[ARM_SP_REGNUM]
		  = pv_add_constant (regs[ARM_SP_REGNUM], -4);
		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
	      }
	}
      else if ((insn & 0xffff0000) == 0xe54b0000	/* strb rx,[r11,#-n] */
	       || (insn & 0xffff00f0) == 0xe14b00b0	/* strh rx,[r11,#-n] */
	       || (insn & 0xffffc000) == 0xe50b0000)	/* str rx,[r11,#-n] */
	{
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
	  continue;
	}
      else if ((insn & 0xffff0000) == 0xe5cd0000	/* strb rx,[sp,#n] */
	       || (insn & 0xffff00f0) == 0xe1cd00b0	/* strh rx,[sp,#n] */
	       || (insn & 0xffffc000) == 0xe58d0000)	/* str rx,[sp,#n] */
	{
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe8800000	/* stm Rn,
							   { registers } */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  /* No need to add this to saved_regs -- it's just arg regs.  */
	  continue;
	}
      else if ((insn & 0xfffff000) == 0xe24cb000)	/* sub fp, ip #n */
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
	}
      else if ((insn & 0xfffff000) == 0xe24dd000)	/* sub sp, sp #n */
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
	}
      else if ((insn & 0xffff7fff) == 0xed6d0103	/* stfe f?,
							   [sp, -#c]! */
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	{
	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;

	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	  regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
	  pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
	}
      else if ((insn & 0xffbf0fff) == 0xec2d0200	/* sfmfd f0, 4,
							   [sp!] */
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	{
	  int n_saved_fp_regs;
	  unsigned int fp_start_reg, fp_bound_reg;

	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;

	  /* The N0/N1 bits together encode how many FPA registers are
	     pushed by this sfmfd.  */
	  if ((insn & 0x800) == 0x800)		/* N0 is set */
	    {
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 3;
	      else
		n_saved_fp_regs = 1;
	    }
	  else
	    {
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 2;
	      else
		n_saved_fp_regs = 4;
	    }

	  fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
	  fp_bound_reg = fp_start_reg + n_saved_fp_regs;
	  for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
	    {
	      regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						     -12);
	      pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
			     regs[fp_start_reg++]);
	    }
	}
      else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
	{
	  /* Allow some special function calls when skipping the
	     prologue; GCC generates these before storing arguments to
	     the stack.  */
	  CORE_ADDR dest = BranchDest (current_pc, insn);

	  if (skip_prologue_function (gdbarch, dest, 0))
	    continue;
	  else
	    break;
	}
      else if ((insn & 0xf0000000) != 0xe0000000)
	break;			/* Condition not true, exit early.  */
      else if (arm_instruction_changes_pc (insn))
	/* Don't scan past anything that might change control flow.  */
	break;
      else if ((insn & 0xfe500000) == 0xe8100000	/* ldm */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
	continue;
      else if ((insn & 0xfc500000) == 0xe4100000
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Similarly ignore single loads from the stack.  */
	continue;
      else if ((insn & 0xffff0ff0) == 0xe1a00000)
	/* MOV Rd, Rm.  Skip register copies, i.e. saves to another
	   register instead of the stack.  */
	continue;
      else
	{
	  /* The optimizer might shove anything into the prologue,
	     so we just skip what we don't recognize.  */
	  unrecognized_pc = current_pc;
	  continue;
	}
    }

  if (unrecognized_pc == 0)
    unrecognized_pc = current_pc;

  if (cache)
    {
      int framereg, framesize;

      /* The frame size is just the distance from the frame register
	 to the original stack pointer.  */
      if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
	{
	  /* Frame pointer is fp.  */
	  framereg = ARM_FP_REGNUM;
	  framesize = -regs[ARM_FP_REGNUM].k;
	}
      else
	{
	  /* Try the stack pointer... this is a bit desperate.  */
	  framereg = ARM_SP_REGNUM;
	  framesize = -regs[ARM_SP_REGNUM].k;
	}

      cache->framereg = framereg;
      cache->framesize = framesize;

      for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
	if (pv_area_find_reg (stack, gdbarch, regno, &offset))
	  cache->saved_regs[regno].addr = offset;
    }

  if (arm_debug)
    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, unrecognized_pc));

  do_cleanups (back_to);
  return unrecognized_pc;
}
/* Scan the prologue of the function containing THIS_FRAME's PC and
   fill in CACHE (frame register, frame size, saved registers).  */

static void
arm_scan_prologue (struct frame_info *this_frame,
		   struct arm_prologue_cache *cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  int regno;
  CORE_ADDR prologue_start, prologue_end, current_pc;
  CORE_ADDR prev_pc = get_frame_pc (this_frame);
  CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
  pv_t regs[ARM_FPS_REGNUM];
  struct pv_area *stack;
  struct cleanup *back_to;
  CORE_ADDR offset;

  /* Assume there is no frame until proven otherwise.  */
  cache->framereg = ARM_SP_REGNUM;
  cache->framesize = 0;

  /* Check for Thumb prologue.  */
  if (arm_frame_is_thumb (this_frame))
    {
      thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
      return;
    }

  /* Find the function prologue.  If we can't find the function in
     the symbol table, peek in the stack frame to find the PC.  */
  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* One way to find the end of the prologue (which works well
	 for unoptimized code) is to do the following:

	    struct symtab_and_line sal = find_pc_line (prologue_start, 0);

	    if (sal.line == 0)
	      prologue_end = prev_pc;
	    else if (sal.end < prologue_end)
	      prologue_end = sal.end;

	 This mechanism is very accurate so long as the optimizer
	 doesn't move any instructions from the function body into the
	 prologue.  If this happens, sal.end will be the last
	 instruction in the first hunk of prologue code just before
	 the first instruction that the scheduler has moved from
	 the body to the prologue.

	 In order to make sure that we scan all of the prologue
	 instructions, we use a slightly less accurate mechanism which
	 may scan more than necessary.  To help compensate for this
	 lack of accuracy, the prologue scanning loop below contains
	 several clauses which'll cause the loop to terminate early if
	 an implausible prologue instruction is encountered.

	 The expression

	      prologue_start + 64

	 is a suitable endpoint since it accounts for the largest
	 possible prologue plus up to five instructions inserted by
	 the scheduler.  */

      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }
  else
    {
      /* We have no symbol information.  Our only option is to assume this
	 function has a standard stack frame and the normal frame register.
	 Then, we can find the value of our frame pointer on entrance to
	 the callee (or at the present moment if this is the innermost frame).
	 The value stored there should be the address of the stmfd + 8.  */
      CORE_ADDR frame_loc;
      LONGEST return_value;

      frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
      if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
	return;
      else
	{
	  prologue_start = gdbarch_addr_bits_remove
			     (gdbarch, return_value) - 8;
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }

  /* Never scan past the PC we are unwinding from.  */
  if (prev_pc < prologue_end)
    prologue_end = prev_pc;

  arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}
/* Allocate and fill in a prologue cache for THIS_FRAME by scanning its
   prologue, then convert the saved-register offsets into absolute
   addresses.  */

static struct arm_prologue_cache *
arm_make_prologue_cache (struct frame_info *this_frame)
{
  int reg;
  struct arm_prologue_cache *cache;
  CORE_ADDR unwound_fp;

  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  arm_scan_prologue (this_frame, cache);

  unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
  if (unwound_fp == 0)
    return cache;

  /* The previous frame's SP is the frame register's value plus the
     frame size determined by the prologue scan.  */
  cache->prev_sp = unwound_fp + cache->framesize;

  /* Calculate actual addresses of saved registers using offsets
     determined by arm_scan_prologue.  */
  for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
    if (trad_frame_addr_p (cache->saved_regs, reg))
      cache->saved_regs[reg].addr += cache->prev_sp;

  return cache;
}
/* Our frame ID for a normal frame is the current function's starting PC
   and the caller's SP when we were called.  */

static void
arm_prologue_this_id (struct frame_info *this_frame,
		      void **this_cache,
		      struct frame_id *this_id)
{
  struct arm_prologue_cache *cache;
  struct frame_id id;
  CORE_ADDR pc, func;

  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = *this_cache;

  /* This is meant to halt the backtrace at "_start".  */
  pc = get_frame_pc (this_frame);
  if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
    return;

  /* If we've hit a wall, stop.  */
  if (cache->prev_sp == 0)
    return;

  /* Use function start address as part of the frame ID.  If we cannot
     identify the start address (due to missing symbol information),
     fall back to just using the current PC.  */
  func = get_frame_func (this_frame);
  if (!func)
    func = pc;

  id = frame_id_build (cache->prev_sp, func);
  *this_id = id;
}
/* Unwind register PREV_REGNUM of the frame previous to THIS_FRAME,
   using the prologue cache; PC, SP and PS need special reconstruction
   rather than a simple stack lookup.  */

static struct value *
arm_prologue_prev_register (struct frame_info *this_frame,
			    void **this_cache,
			    int prev_regnum)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = *this_cache;

  /* If we are asked to unwind the PC, then we need to return the LR
     instead.  The prologue may save PC, but it will point into this
     frame's prologue, not the next frame's resume location.  Also
     strip the saved T bit.  A valid LR may have the low bit set, but
     a valid PC never does.  */
  if (prev_regnum == ARM_PC_REGNUM)
    {
      CORE_ADDR lr;

      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, prev_regnum,
					arm_addr_bits_remove (gdbarch, lr));
    }

  /* SP is generally not saved to the stack, but this frame is
     identified by the next frame's stack pointer at the time of the call.
     The value was already reconstructed into PREV_SP.  */
  if (prev_regnum == ARM_SP_REGNUM)
    return frame_unwind_got_constant (this_frame, prev_regnum,
				      cache->prev_sp);

  /* The CPSR may have been changed by the call instruction and by the
     called function.  The only bit we can reconstruct is the T bit,
     by checking the low bit of LR as of the call.  This is a reliable
     indicator of Thumb-ness except for some ARM v4T pre-interworking
     Thumb code, which could get away with a clear low bit as long as
     the called function did not use bx.  Guess that all other
     bits are unchanged; the condition flags are presumably lost,
     but the processor status is likely valid.  */
  if (prev_regnum == ARM_PS_REGNUM)
    {
      CORE_ADDR lr, cpsr;
      ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

      cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
	cpsr |= t_bit;
      else
	cpsr &= ~t_bit;
      return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
    }

  return trad_frame_get_prev_register (this_frame, cache->saved_regs,
				       prev_regnum);
}
/* Frame unwinder driven by prologue analysis; the sniffer of last
   resort for normal frames.  */
struct frame_unwind arm_prologue_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  default_frame_sniffer
};
/* Maintain a list of ARM exception table entries per objfile, similar to the
   list of mapping symbols.  We only cache entries for standard ARM-defined
   personality routines; the cache will contain only the frame unwinding
   instructions associated with the entry (not the descriptors).  */

/* Per-objfile key under which the exception-table cache is stored.  */
static const struct objfile_data *arm_exidx_data_key;

struct arm_exidx_entry
{
  bfd_vma addr;		/* Section-relative function address; sort key
			   used by arm_compare_exidx_entries.  */
  gdb_byte *entry;	/* Normalized unwind instructions, or NULL.  */
};
typedef struct arm_exidx_entry arm_exidx_entry_s;
DEF_VEC_O(arm_exidx_entry_s);

struct arm_exidx_data
{
  /* One vector of exception-table entries per BFD section of the
     objfile, indexed by section index.  */
  VEC(arm_exidx_entry_s) **section_maps;
};
/* Per-objfile data cleanup: release every per-section entry vector.
   The arm_exidx_data object itself lives on the objfile obstack.  */

static void
arm_exidx_data_free (struct objfile *objfile, void *arg)
{
  struct arm_exidx_data *data = arg;
  unsigned int i;

  for (i = 0; i < objfile->obfd->section_count; i++)
    VEC_free (arm_exidx_entry_s, data->section_maps[i]);
}
/* Ordering predicate on exception-table entries: sort by ascending
   function address.  */

static int
arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
			   const struct arm_exidx_entry *rhs)
{
  return lhs->addr < rhs->addr;
}
/* Return the section of OBJFILE whose allocated address range contains
   VMA, or NULL if no SEC_ALLOC section covers it.  */

static struct obj_section *
arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
{
  struct obj_section *osect;

  ALL_OBJFILE_OSECTIONS (objfile, osect)
    if (bfd_get_section_flags (objfile->obfd,
			       osect->the_bfd_section) & SEC_ALLOC)
      {
	bfd_vma start, size;
	start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
	size = bfd_get_section_size (osect->the_bfd_section);

	if (start <= vma && vma < start + size)
	  return osect;
      }

  return NULL;
}
2206 /* Parse contents of exception table and exception index sections
2207 of OBJFILE, and fill in the exception table entry cache.
2209 For each entry that refers to a standard ARM-defined personality
2210 routine, extract the frame unwinding instructions (from either
2211 the index or the table section). The unwinding instructions
2213 - extracting them from the rest of the table data
2214 - converting to host endianness
2215 - appending the implicit 0xb0 ("Finish") code
2217 The extracted and normalized instructions are stored for later
2218 retrieval by the arm_find_exidx_entry routine. */
2221 arm_exidx_new_objfile (struct objfile
*objfile
)
2223 struct cleanup
*cleanups
;
2224 struct arm_exidx_data
*data
;
2225 asection
*exidx
, *extab
;
2226 bfd_vma exidx_vma
= 0, extab_vma
= 0;
2227 bfd_size_type exidx_size
= 0, extab_size
= 0;
2228 gdb_byte
*exidx_data
= NULL
, *extab_data
= NULL
;
2231 /* If we've already touched this file, do nothing. */
2232 if (!objfile
|| objfile_data (objfile
, arm_exidx_data_key
) != NULL
)
2234 cleanups
= make_cleanup (null_cleanup
, NULL
);
2236 /* Read contents of exception table and index. */
2237 exidx
= bfd_get_section_by_name (objfile
->obfd
, ".ARM.exidx");
2240 exidx_vma
= bfd_section_vma (objfile
->obfd
, exidx
);
2241 exidx_size
= bfd_get_section_size (exidx
);
2242 exidx_data
= xmalloc (exidx_size
);
2243 make_cleanup (xfree
, exidx_data
);
2245 if (!bfd_get_section_contents (objfile
->obfd
, exidx
,
2246 exidx_data
, 0, exidx_size
))
2248 do_cleanups (cleanups
);
2253 extab
= bfd_get_section_by_name (objfile
->obfd
, ".ARM.extab");
2256 extab_vma
= bfd_section_vma (objfile
->obfd
, extab
);
2257 extab_size
= bfd_get_section_size (extab
);
2258 extab_data
= xmalloc (extab_size
);
2259 make_cleanup (xfree
, extab_data
);
2261 if (!bfd_get_section_contents (objfile
->obfd
, extab
,
2262 extab_data
, 0, extab_size
))
2264 do_cleanups (cleanups
);
2269 /* Allocate exception table data structure. */
2270 data
= OBSTACK_ZALLOC (&objfile
->objfile_obstack
, struct arm_exidx_data
);
2271 set_objfile_data (objfile
, arm_exidx_data_key
, data
);
2272 data
->section_maps
= OBSTACK_CALLOC (&objfile
->objfile_obstack
,
2273 objfile
->obfd
->section_count
,
2274 VEC(arm_exidx_entry_s
) *);
2276 /* Fill in exception table. */
2277 for (i
= 0; i
< exidx_size
/ 8; i
++)
2279 struct arm_exidx_entry new_exidx_entry
;
2280 bfd_vma idx
= bfd_h_get_32 (objfile
->obfd
, exidx_data
+ i
* 8);
2281 bfd_vma val
= bfd_h_get_32 (objfile
->obfd
, exidx_data
+ i
* 8 + 4);
2282 bfd_vma addr
= 0, word
= 0;
2283 int n_bytes
= 0, n_words
= 0;
2284 struct obj_section
*sec
;
2285 gdb_byte
*entry
= NULL
;
2287 /* Extract address of start of function. */
2288 idx
= ((idx
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2289 idx
+= exidx_vma
+ i
* 8;
2291 /* Find section containing function and compute section offset. */
2292 sec
= arm_obj_section_from_vma (objfile
, idx
);
2295 idx
-= bfd_get_section_vma (objfile
->obfd
, sec
->the_bfd_section
);
2297 /* Determine address of exception table entry. */
2300 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2302 else if ((val
& 0xff000000) == 0x80000000)
2304 /* Exception table entry embedded in .ARM.exidx
2305 -- must be short form. */
2309 else if (!(val
& 0x80000000))
2311 /* Exception table entry in .ARM.extab. */
2312 addr
= ((val
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2313 addr
+= exidx_vma
+ i
* 8 + 4;
2315 if (addr
>= extab_vma
&& addr
+ 4 <= extab_vma
+ extab_size
)
2317 word
= bfd_h_get_32 (objfile
->obfd
,
2318 extab_data
+ addr
- extab_vma
);
2321 if ((word
& 0xff000000) == 0x80000000)
2326 else if ((word
& 0xff000000) == 0x81000000
2327 || (word
& 0xff000000) == 0x82000000)
2331 n_words
= ((word
>> 16) & 0xff);
2333 else if (!(word
& 0x80000000))
2336 struct obj_section
*pers_sec
;
2337 int gnu_personality
= 0;
2339 /* Custom personality routine. */
2340 pers
= ((word
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2341 pers
= UNMAKE_THUMB_ADDR (pers
+ addr
- 4);
2343 /* Check whether we've got one of the variants of the
2344 GNU personality routines. */
2345 pers_sec
= arm_obj_section_from_vma (objfile
, pers
);
2348 static const char *personality
[] =
2350 "__gcc_personality_v0",
2351 "__gxx_personality_v0",
2352 "__gcj_personality_v0",
2353 "__gnu_objc_personality_v0",
2357 CORE_ADDR pc
= pers
+ obj_section_offset (pers_sec
);
2360 for (k
= 0; personality
[k
]; k
++)
2361 if (lookup_minimal_symbol_by_pc_name
2362 (pc
, personality
[k
], objfile
))
2364 gnu_personality
= 1;
2369 /* If so, the next word contains a word count in the high
2370 byte, followed by the same unwind instructions as the
2371 pre-defined forms. */
2373 && addr
+ 4 <= extab_vma
+ extab_size
)
2375 word
= bfd_h_get_32 (objfile
->obfd
,
2376 extab_data
+ addr
- extab_vma
);
2379 n_words
= ((word
>> 24) & 0xff);
2385 /* Sanity check address. */
2387 if (addr
< extab_vma
|| addr
+ 4 * n_words
> extab_vma
+ extab_size
)
2388 n_words
= n_bytes
= 0;
2390 /* The unwind instructions reside in WORD (only the N_BYTES least
2391 significant bytes are valid), followed by N_WORDS words in the
2392 extab section starting at ADDR. */
2393 if (n_bytes
|| n_words
)
2395 gdb_byte
*p
= entry
= obstack_alloc (&objfile
->objfile_obstack
,
2396 n_bytes
+ n_words
* 4 + 1);
2399 *p
++ = (gdb_byte
) ((word
>> (8 * n_bytes
)) & 0xff);
2403 word
= bfd_h_get_32 (objfile
->obfd
,
2404 extab_data
+ addr
- extab_vma
);
2407 *p
++ = (gdb_byte
) ((word
>> 24) & 0xff);
2408 *p
++ = (gdb_byte
) ((word
>> 16) & 0xff);
2409 *p
++ = (gdb_byte
) ((word
>> 8) & 0xff);
2410 *p
++ = (gdb_byte
) (word
& 0xff);
2413 /* Implied "Finish" to terminate the list. */
2417 /* Push entry onto vector. They are guaranteed to always
2418 appear in order of increasing addresses. */
2419 new_exidx_entry
.addr
= idx
;
2420 new_exidx_entry
.entry
= entry
;
2421 VEC_safe_push (arm_exidx_entry_s
,
2422 data
->section_maps
[sec
->the_bfd_section
->index
],
2426 do_cleanups (cleanups
);
2429 /* Search for the exception table entry covering MEMADDR. If one is found,
2430 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2431 set *START to the start of the region covered by this entry. */
2434 arm_find_exidx_entry (CORE_ADDR memaddr
, CORE_ADDR
*start
)
2436 struct obj_section
*sec
;
2438 sec
= find_pc_section (memaddr
);
2441 struct arm_exidx_data
*data
;
2442 VEC(arm_exidx_entry_s
) *map
;
2443 struct arm_exidx_entry map_key
= { memaddr
- obj_section_addr (sec
), 0 };
2446 data
= objfile_data (sec
->objfile
, arm_exidx_data_key
);
2449 map
= data
->section_maps
[sec
->the_bfd_section
->index
];
2450 if (!VEC_empty (arm_exidx_entry_s
, map
))
2452 struct arm_exidx_entry
*map_sym
;
2454 idx
= VEC_lower_bound (arm_exidx_entry_s
, map
, &map_key
,
2455 arm_compare_exidx_entries
);
2457 /* VEC_lower_bound finds the earliest ordered insertion
2458 point. If the following symbol starts at this exact
2459 address, we use that; otherwise, the preceding
2460 exception table entry covers this address. */
2461 if (idx
< VEC_length (arm_exidx_entry_s
, map
))
2463 map_sym
= VEC_index (arm_exidx_entry_s
, map
, idx
);
2464 if (map_sym
->addr
== map_key
.addr
)
2467 *start
= map_sym
->addr
+ obj_section_addr (sec
);
2468 return map_sym
->entry
;
2474 map_sym
= VEC_index (arm_exidx_entry_s
, map
, idx
- 1);
2476 *start
= map_sym
->addr
+ obj_section_addr (sec
);
2477 return map_sym
->entry
;
2486 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2487 instruction list from the ARM exception table entry ENTRY, allocate and
2488 return a prologue cache structure describing how to unwind this frame.
2490 Return NULL if the unwinding instruction list contains a "spare",
2491 "reserved" or "refuse to unwind" instruction as defined in section
2492 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2493 for the ARM Architecture" document. */
2495 static struct arm_prologue_cache
*
2496 arm_exidx_fill_cache (struct frame_info
*this_frame
, gdb_byte
*entry
)
2501 struct arm_prologue_cache
*cache
;
2502 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2503 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2509 /* Whenever we reload SP, we actually have to retrieve its
2510 actual value in the current frame. */
2513 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2515 int reg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2516 vsp
= get_frame_register_unsigned (this_frame
, reg
);
2520 CORE_ADDR addr
= cache
->saved_regs
[ARM_SP_REGNUM
].addr
;
2521 vsp
= get_frame_memory_unsigned (this_frame
, addr
, 4);
2527 /* Decode next unwind instruction. */
2530 if ((insn
& 0xc0) == 0)
2532 int offset
= insn
& 0x3f;
2533 vsp
+= (offset
<< 2) + 4;
2535 else if ((insn
& 0xc0) == 0x40)
2537 int offset
= insn
& 0x3f;
2538 vsp
-= (offset
<< 2) + 4;
2540 else if ((insn
& 0xf0) == 0x80)
2542 int mask
= ((insn
& 0xf) << 8) | *entry
++;
2545 /* The special case of an all-zero mask identifies
2546 "Refuse to unwind". We return NULL to fall back
2547 to the prologue analyzer. */
2551 /* Pop registers r4..r15 under mask. */
2552 for (i
= 0; i
< 12; i
++)
2553 if (mask
& (1 << i
))
2555 cache
->saved_regs
[4 + i
].addr
= vsp
;
2559 /* Special-case popping SP -- we need to reload vsp. */
2560 if (mask
& (1 << (ARM_SP_REGNUM
- 4)))
2563 else if ((insn
& 0xf0) == 0x90)
2565 int reg
= insn
& 0xf;
2567 /* Reserved cases. */
2568 if (reg
== ARM_SP_REGNUM
|| reg
== ARM_PC_REGNUM
)
2571 /* Set SP from another register and mark VSP for reload. */
2572 cache
->saved_regs
[ARM_SP_REGNUM
] = cache
->saved_regs
[reg
];
2575 else if ((insn
& 0xf0) == 0xa0)
2577 int count
= insn
& 0x7;
2578 int pop_lr
= (insn
& 0x8) != 0;
2581 /* Pop r4..r[4+count]. */
2582 for (i
= 0; i
<= count
; i
++)
2584 cache
->saved_regs
[4 + i
].addr
= vsp
;
2588 /* If indicated by flag, pop LR as well. */
2591 cache
->saved_regs
[ARM_LR_REGNUM
].addr
= vsp
;
2595 else if (insn
== 0xb0)
2597 /* We could only have updated PC by popping into it; if so, it
2598 will show up as address. Otherwise, copy LR into PC. */
2599 if (!trad_frame_addr_p (cache
->saved_regs
, ARM_PC_REGNUM
))
2600 cache
->saved_regs
[ARM_PC_REGNUM
]
2601 = cache
->saved_regs
[ARM_LR_REGNUM
];
2606 else if (insn
== 0xb1)
2608 int mask
= *entry
++;
2611 /* All-zero mask and mask >= 16 is "spare". */
2612 if (mask
== 0 || mask
>= 16)
2615 /* Pop r0..r3 under mask. */
2616 for (i
= 0; i
< 4; i
++)
2617 if (mask
& (1 << i
))
2619 cache
->saved_regs
[i
].addr
= vsp
;
2623 else if (insn
== 0xb2)
2625 ULONGEST offset
= 0;
2630 offset
|= (*entry
& 0x7f) << shift
;
2633 while (*entry
++ & 0x80);
2635 vsp
+= 0x204 + (offset
<< 2);
2637 else if (insn
== 0xb3)
2639 int start
= *entry
>> 4;
2640 int count
= (*entry
++) & 0xf;
2643 /* Only registers D0..D15 are valid here. */
2644 if (start
+ count
>= 16)
2647 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2648 for (i
= 0; i
<= count
; i
++)
2650 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2654 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2657 else if ((insn
& 0xf8) == 0xb8)
2659 int count
= insn
& 0x7;
2662 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2663 for (i
= 0; i
<= count
; i
++)
2665 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2669 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2672 else if (insn
== 0xc6)
2674 int start
= *entry
>> 4;
2675 int count
= (*entry
++) & 0xf;
2678 /* Only registers WR0..WR15 are valid. */
2679 if (start
+ count
>= 16)
2682 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2683 for (i
= 0; i
<= count
; i
++)
2685 cache
->saved_regs
[ARM_WR0_REGNUM
+ start
+ i
].addr
= vsp
;
2689 else if (insn
== 0xc7)
2691 int mask
= *entry
++;
2694 /* All-zero mask and mask >= 16 is "spare". */
2695 if (mask
== 0 || mask
>= 16)
2698 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2699 for (i
= 0; i
< 4; i
++)
2700 if (mask
& (1 << i
))
2702 cache
->saved_regs
[ARM_WCGR0_REGNUM
+ i
].addr
= vsp
;
2706 else if ((insn
& 0xf8) == 0xc0)
2708 int count
= insn
& 0x7;
2711 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2712 for (i
= 0; i
<= count
; i
++)
2714 cache
->saved_regs
[ARM_WR0_REGNUM
+ 10 + i
].addr
= vsp
;
2718 else if (insn
== 0xc8)
2720 int start
= *entry
>> 4;
2721 int count
= (*entry
++) & 0xf;
2724 /* Only registers D0..D31 are valid. */
2725 if (start
+ count
>= 16)
2728 /* Pop VFP double-precision registers
2729 D[16+start]..D[16+start+count]. */
2730 for (i
= 0; i
<= count
; i
++)
2732 cache
->saved_regs
[ARM_D0_REGNUM
+ 16 + start
+ i
].addr
= vsp
;
2736 else if (insn
== 0xc9)
2738 int start
= *entry
>> 4;
2739 int count
= (*entry
++) & 0xf;
2742 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2743 for (i
= 0; i
<= count
; i
++)
2745 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2749 else if ((insn
& 0xf8) == 0xd0)
2751 int count
= insn
& 0x7;
2754 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2755 for (i
= 0; i
<= count
; i
++)
2757 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2763 /* Everything else is "spare". */
2768 /* If we restore SP from a register, assume this was the frame register.
2769 Otherwise just fall back to SP as frame register. */
2770 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2771 cache
->framereg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2773 cache
->framereg
= ARM_SP_REGNUM
;
2775 /* Determine offset to previous frame. */
2777 = vsp
- get_frame_register_unsigned (this_frame
, cache
->framereg
);
2779 /* We already got the previous SP. */
2780 cache
->prev_sp
= vsp
;
2785 /* Unwinding via ARM exception table entries. Note that the sniffer
2786 already computes a filled-in prologue cache, which is then used
2787 with the same arm_prologue_this_id and arm_prologue_prev_register
2788 routines also used for prologue-parsing based unwinding. */
2791 arm_exidx_unwind_sniffer (const struct frame_unwind
*self
,
2792 struct frame_info
*this_frame
,
2793 void **this_prologue_cache
)
2795 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2796 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
2797 CORE_ADDR addr_in_block
, exidx_region
, func_start
;
2798 struct arm_prologue_cache
*cache
;
2801 /* See if we have an ARM exception table entry covering this address. */
2802 addr_in_block
= get_frame_address_in_block (this_frame
);
2803 entry
= arm_find_exidx_entry (addr_in_block
, &exidx_region
);
2807 /* The ARM exception table does not describe unwind information
2808 for arbitrary PC values, but is guaranteed to be correct only
2809 at call sites. We have to decide here whether we want to use
2810 ARM exception table information for this frame, or fall back
2811 to using prologue parsing. (Note that if we have DWARF CFI,
2812 this sniffer isn't even called -- CFI is always preferred.)
2814 Before we make this decision, however, we check whether we
2815 actually have *symbol* information for the current frame.
2816 If not, prologue parsing would not work anyway, so we might
2817 as well use the exception table and hope for the best. */
2818 if (find_pc_partial_function (addr_in_block
, NULL
, &func_start
, NULL
))
2822 /* If the next frame is "normal", we are at a call site in this
2823 frame, so exception information is guaranteed to be valid. */
2824 if (get_next_frame (this_frame
)
2825 && get_frame_type (get_next_frame (this_frame
)) == NORMAL_FRAME
)
2828 /* We also assume exception information is valid if we're currently
2829 blocked in a system call. The system library is supposed to
2830 ensure this, so that e.g. pthread cancellation works. */
2831 if (arm_frame_is_thumb (this_frame
))
2835 if (safe_read_memory_integer (get_frame_pc (this_frame
) - 2, 2,
2836 byte_order_for_code
, &insn
)
2837 && (insn
& 0xff00) == 0xdf00 /* svc */)
2844 if (safe_read_memory_integer (get_frame_pc (this_frame
) - 4, 4,
2845 byte_order_for_code
, &insn
)
2846 && (insn
& 0x0f000000) == 0x0f000000 /* svc */)
2850 /* Bail out if we don't know that exception information is valid. */
2854 /* The ARM exception index does not mark the *end* of the region
2855 covered by the entry, and some functions will not have any entry.
2856 To correctly recognize the end of the covered region, the linker
2857 should have inserted dummy records with a CANTUNWIND marker.
2859 Unfortunately, current versions of GNU ld do not reliably do
2860 this, and thus we may have found an incorrect entry above.
2861 As a (temporary) sanity check, we only use the entry if it
2862 lies *within* the bounds of the function. Note that this check
2863 might reject perfectly valid entries that just happen to cover
2864 multiple functions; therefore this check ought to be removed
2865 once the linker is fixed. */
2866 if (func_start
> exidx_region
)
2870 /* Decode the list of unwinding instructions into a prologue cache.
2871 Note that this may fail due to e.g. a "refuse to unwind" code. */
2872 cache
= arm_exidx_fill_cache (this_frame
, entry
);
2876 *this_prologue_cache
= cache
;
2880 struct frame_unwind arm_exidx_unwind
= {
2882 default_frame_unwind_stop_reason
,
2883 arm_prologue_this_id
,
2884 arm_prologue_prev_register
,
2886 arm_exidx_unwind_sniffer
2889 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2890 trampoline, return the target PC. Otherwise return 0.
2892 void call0a (char c, short s, int i, long l) {}
2896 (*pointer_to_call0a) (c, s, i, l);
2899 Instead of calling a stub library function _call_via_xx (xx is
2900 the register name), GCC may inline the trampoline in the object
2901 file as below (register r2 has the address of call0a).
2904 .type main, %function
2913 The trampoline 'bx r2' doesn't belong to main. */
2916 arm_skip_bx_reg (struct frame_info
*frame
, CORE_ADDR pc
)
2918 /* The heuristics of recognizing such trampoline is that FRAME is
2919 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2920 if (arm_frame_is_thumb (frame
))
2924 if (target_read_memory (pc
, buf
, 2) == 0)
2926 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
2927 enum bfd_endian byte_order_for_code
2928 = gdbarch_byte_order_for_code (gdbarch
);
2930 = extract_unsigned_integer (buf
, 2, byte_order_for_code
);
2932 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
2935 = get_frame_register_unsigned (frame
, bits (insn
, 3, 6));
2937 /* Clear the LSB so that gdb core sets step-resume
2938 breakpoint at the right address. */
2939 return UNMAKE_THUMB_ADDR (dest
);
2947 static struct arm_prologue_cache
*
2948 arm_make_stub_cache (struct frame_info
*this_frame
)
2950 struct arm_prologue_cache
*cache
;
2952 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2953 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2955 cache
->prev_sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2960 /* Our frame ID for a stub frame is the current SP and LR. */
2963 arm_stub_this_id (struct frame_info
*this_frame
,
2965 struct frame_id
*this_id
)
2967 struct arm_prologue_cache
*cache
;
2969 if (*this_cache
== NULL
)
2970 *this_cache
= arm_make_stub_cache (this_frame
);
2971 cache
= *this_cache
;
2973 *this_id
= frame_id_build (cache
->prev_sp
, get_frame_pc (this_frame
));
2977 arm_stub_unwind_sniffer (const struct frame_unwind
*self
,
2978 struct frame_info
*this_frame
,
2979 void **this_prologue_cache
)
2981 CORE_ADDR addr_in_block
;
2983 CORE_ADDR pc
, start_addr
;
2986 addr_in_block
= get_frame_address_in_block (this_frame
);
2987 pc
= get_frame_pc (this_frame
);
2988 if (in_plt_section (addr_in_block
)
2989 /* We also use the stub winder if the target memory is unreadable
2990 to avoid having the prologue unwinder trying to read it. */
2991 || target_read_memory (pc
, dummy
, 4) != 0)
2994 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0
2995 && arm_skip_bx_reg (this_frame
, pc
) != 0)
3001 struct frame_unwind arm_stub_unwind
= {
3003 default_frame_unwind_stop_reason
,
3005 arm_prologue_prev_register
,
3007 arm_stub_unwind_sniffer
3010 /* Put here the code to store, into CACHE->saved_regs, the addresses
3011 of the saved registers of frame described by THIS_FRAME. CACHE is
3014 static struct arm_prologue_cache
*
3015 arm_m_exception_cache (struct frame_info
*this_frame
)
3017 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
3018 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
3019 struct arm_prologue_cache
*cache
;
3020 CORE_ADDR unwound_sp
;
3023 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
3024 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
3026 unwound_sp
= get_frame_register_unsigned (this_frame
,
3029 /* The hardware saves eight 32-bit words, comprising xPSR,
3030 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3031 "B1.5.6 Exception entry behavior" in
3032 "ARMv7-M Architecture Reference Manual". */
3033 cache
->saved_regs
[0].addr
= unwound_sp
;
3034 cache
->saved_regs
[1].addr
= unwound_sp
+ 4;
3035 cache
->saved_regs
[2].addr
= unwound_sp
+ 8;
3036 cache
->saved_regs
[3].addr
= unwound_sp
+ 12;
3037 cache
->saved_regs
[12].addr
= unwound_sp
+ 16;
3038 cache
->saved_regs
[14].addr
= unwound_sp
+ 20;
3039 cache
->saved_regs
[15].addr
= unwound_sp
+ 24;
3040 cache
->saved_regs
[ARM_PS_REGNUM
].addr
= unwound_sp
+ 28;
3042 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3043 aligner between the top of the 32-byte stack frame and the
3044 previous context's stack pointer. */
3045 cache
->prev_sp
= unwound_sp
+ 32;
3046 if (safe_read_memory_integer (unwound_sp
+ 28, 4, byte_order
, &xpsr
)
3047 && (xpsr
& (1 << 9)) != 0)
3048 cache
->prev_sp
+= 4;
3053 /* Implementation of function hook 'this_id' in
3054 'struct frame_uwnind'. */
3057 arm_m_exception_this_id (struct frame_info
*this_frame
,
3059 struct frame_id
*this_id
)
3061 struct arm_prologue_cache
*cache
;
3063 if (*this_cache
== NULL
)
3064 *this_cache
= arm_m_exception_cache (this_frame
);
3065 cache
= *this_cache
;
3067 /* Our frame ID for a stub frame is the current SP and LR. */
3068 *this_id
= frame_id_build (cache
->prev_sp
,
3069 get_frame_pc (this_frame
));
3072 /* Implementation of function hook 'prev_register' in
3073 'struct frame_uwnind'. */
3075 static struct value
*
3076 arm_m_exception_prev_register (struct frame_info
*this_frame
,
3080 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
3081 struct arm_prologue_cache
*cache
;
3083 if (*this_cache
== NULL
)
3084 *this_cache
= arm_m_exception_cache (this_frame
);
3085 cache
= *this_cache
;
3087 /* The value was already reconstructed into PREV_SP. */
3088 if (prev_regnum
== ARM_SP_REGNUM
)
3089 return frame_unwind_got_constant (this_frame
, prev_regnum
,
3092 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
3096 /* Implementation of function hook 'sniffer' in
3097 'struct frame_uwnind'. */
3100 arm_m_exception_unwind_sniffer (const struct frame_unwind
*self
,
3101 struct frame_info
*this_frame
,
3102 void **this_prologue_cache
)
3104 CORE_ADDR this_pc
= get_frame_pc (this_frame
);
3106 /* No need to check is_m; this sniffer is only registered for
3107 M-profile architectures. */
3109 /* Exception frames return to one of these magic PCs. Other values
3110 are not defined as of v7-M. See details in "B1.5.8 Exception
3111 return behavior" in "ARMv7-M Architecture Reference Manual". */
3112 if (this_pc
== 0xfffffff1 || this_pc
== 0xfffffff9
3113 || this_pc
== 0xfffffffd)
3119 /* Frame unwinder for M-profile exceptions. */
3121 struct frame_unwind arm_m_exception_unwind
=
3124 default_frame_unwind_stop_reason
,
3125 arm_m_exception_this_id
,
3126 arm_m_exception_prev_register
,
3128 arm_m_exception_unwind_sniffer
3132 arm_normal_frame_base (struct frame_info
*this_frame
, void **this_cache
)
3134 struct arm_prologue_cache
*cache
;
3136 if (*this_cache
== NULL
)
3137 *this_cache
= arm_make_prologue_cache (this_frame
);
3138 cache
= *this_cache
;
3140 return cache
->prev_sp
- cache
->framesize
;
3143 struct frame_base arm_normal_base
= {
3144 &arm_prologue_unwind
,
3145 arm_normal_frame_base
,
3146 arm_normal_frame_base
,
3147 arm_normal_frame_base
3150 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3151 dummy frame. The frame ID's base needs to match the TOS value
3152 saved by save_dummy_frame_tos() and returned from
3153 arm_push_dummy_call, and the PC needs to match the dummy frame's
3156 static struct frame_id
3157 arm_dummy_id (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3159 return frame_id_build (get_frame_register_unsigned (this_frame
,
3161 get_frame_pc (this_frame
));
3164 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3165 be used to construct the previous frame's ID, after looking up the
3166 containing function). */
3169 arm_unwind_pc (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3172 pc
= frame_unwind_register_unsigned (this_frame
, ARM_PC_REGNUM
);
3173 return arm_addr_bits_remove (gdbarch
, pc
);
3177 arm_unwind_sp (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3179 return frame_unwind_register_unsigned (this_frame
, ARM_SP_REGNUM
);
3182 static struct value
*
3183 arm_dwarf2_prev_register (struct frame_info
*this_frame
, void **this_cache
,
3186 struct gdbarch
* gdbarch
= get_frame_arch (this_frame
);
3188 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
3193 /* The PC is normally copied from the return column, which
3194 describes saves of LR. However, that version may have an
3195 extra bit set to indicate Thumb state. The bit is not
3197 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3198 return frame_unwind_got_constant (this_frame
, regnum
,
3199 arm_addr_bits_remove (gdbarch
, lr
));
3202 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3203 cpsr
= get_frame_register_unsigned (this_frame
, regnum
);
3204 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3205 if (IS_THUMB_ADDR (lr
))
3209 return frame_unwind_got_constant (this_frame
, regnum
, cpsr
);
3212 internal_error (__FILE__
, __LINE__
,
3213 _("Unexpected register %d"), regnum
);
3218 arm_dwarf2_frame_init_reg (struct gdbarch
*gdbarch
, int regnum
,
3219 struct dwarf2_frame_state_reg
*reg
,
3220 struct frame_info
*this_frame
)
3226 reg
->how
= DWARF2_FRAME_REG_FN
;
3227 reg
->loc
.fn
= arm_dwarf2_prev_register
;
3230 reg
->how
= DWARF2_FRAME_REG_CFA
;
3235 /* Return true if we are in the function's epilogue, i.e. after the
3236 instruction that destroyed the function's stack frame. */
3239 thumb_in_function_epilogue_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3241 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3242 unsigned int insn
, insn2
;
3243 int found_return
= 0, found_stack_adjust
= 0;
3244 CORE_ADDR func_start
, func_end
;
3248 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3251 /* The epilogue is a sequence of instructions along the following lines:
3253 - add stack frame size to SP or FP
3254 - [if frame pointer used] restore SP from FP
3255 - restore registers from SP [may include PC]
3256 - a return-type instruction [if PC wasn't already restored]
3258 In a first pass, we scan forward from the current PC and verify the
3259 instructions we find as compatible with this sequence, ending in a
3262 However, this is not sufficient to distinguish indirect function calls
3263 within a function from indirect tail calls in the epilogue in some cases.
3264 Therefore, if we didn't already find any SP-changing instruction during
3265 forward scan, we add a backward scanning heuristic to ensure we actually
3266 are in the epilogue. */
3269 while (scan_pc
< func_end
&& !found_return
)
3271 if (target_read_memory (scan_pc
, buf
, 2))
3275 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3277 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
3279 else if (insn
== 0x46f7) /* mov pc, lr */
3281 else if (thumb_instruction_restores_sp (insn
))
3283 if ((insn
& 0xff00) == 0xbd00) /* pop <registers, PC> */
3286 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instruction */
3288 if (target_read_memory (scan_pc
, buf
, 2))
3292 insn2
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3294 if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3296 if (insn2
& 0x8000) /* <registers> include PC. */
3299 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3300 && (insn2
& 0x0fff) == 0x0b04)
3302 if ((insn2
& 0xf000) == 0xf000) /* <Rt> is PC. */
3305 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3306 && (insn2
& 0x0e00) == 0x0a00)
3318 /* Since any instruction in the epilogue sequence, with the possible
3319 exception of return itself, updates the stack pointer, we need to
3320 scan backwards for at most one instruction. Try either a 16-bit or
3321 a 32-bit instruction. This is just a heuristic, so we do not worry
3322 too much about false positives. */
3324 if (pc
- 4 < func_start
)
3326 if (target_read_memory (pc
- 4, buf
, 4))
3329 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3330 insn2
= extract_unsigned_integer (buf
+ 2, 2, byte_order_for_code
);
3332 if (thumb_instruction_restores_sp (insn2
))
3333 found_stack_adjust
= 1;
3334 else if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3335 found_stack_adjust
= 1;
3336 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3337 && (insn2
& 0x0fff) == 0x0b04)
3338 found_stack_adjust
= 1;
3339 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3340 && (insn2
& 0x0e00) == 0x0a00)
3341 found_stack_adjust
= 1;
3343 return found_stack_adjust
;
3346 /* Return true if we are in the function's epilogue, i.e. after the
3347 instruction that destroyed the function's stack frame. */
3350 arm_in_function_epilogue_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3352 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3354 int found_return
, found_stack_adjust
;
3355 CORE_ADDR func_start
, func_end
;
3357 if (arm_pc_is_thumb (gdbarch
, pc
))
3358 return thumb_in_function_epilogue_p (gdbarch
, pc
);
3360 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3363 /* We are in the epilogue if the previous instruction was a stack
3364 adjustment and the next instruction is a possible return (bx, mov
3365 pc, or pop). We could have to scan backwards to find the stack
3366 adjustment, or forwards to find the return, but this is a decent
3367 approximation. First scan forwards. */
3370 insn
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
3371 if (bits (insn
, 28, 31) != INST_NV
)
3373 if ((insn
& 0x0ffffff0) == 0x012fff10)
3376 else if ((insn
& 0x0ffffff0) == 0x01a0f000)
3379 else if ((insn
& 0x0fff0000) == 0x08bd0000
3380 && (insn
& 0x0000c000) != 0)
3381 /* POP (LDMIA), including PC or LR. */
3388 /* Scan backwards. This is just a heuristic, so do not worry about
3389 false positives from mode changes. */
3391 if (pc
< func_start
+ 4)
3394 found_stack_adjust
= 0;
3395 insn
= read_memory_unsigned_integer (pc
- 4, 4, byte_order_for_code
);
3396 if (bits (insn
, 28, 31) != INST_NV
)
3398 if ((insn
& 0x0df0f000) == 0x0080d000)
3399 /* ADD SP (register or immediate). */
3400 found_stack_adjust
= 1;
3401 else if ((insn
& 0x0df0f000) == 0x0040d000)
3402 /* SUB SP (register or immediate). */
3403 found_stack_adjust
= 1;
3404 else if ((insn
& 0x0ffffff0) == 0x01a0d000)
3406 found_stack_adjust
= 1;
3407 else if ((insn
& 0x0fff0000) == 0x08bd0000)
3409 found_stack_adjust
= 1;
3410 else if ((insn
& 0x0fff0000) == 0x049d0000)
3411 /* POP of a single register. */
3412 found_stack_adjust
= 1;
3415 if (found_stack_adjust
)
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.  */

struct stack_item
{
  int len;
  struct stack_item *prev;
  void *data;
};
3432 static struct stack_item
*
3433 push_stack_item (struct stack_item
*prev
, const void *contents
, int len
)
3435 struct stack_item
*si
;
3436 si
= xmalloc (sizeof (struct stack_item
));
3437 si
->data
= xmalloc (len
);
3440 memcpy (si
->data
, contents
, len
);
3444 static struct stack_item
*
3445 pop_stack_item (struct stack_item
*si
)
3447 struct stack_item
*dead
= si
;
3455 /* Return the alignment (in bytes) of the given type. */
3458 arm_type_align (struct type
*t
)
3464 t
= check_typedef (t
);
3465 switch (TYPE_CODE (t
))
3468 /* Should never happen. */
3469 internal_error (__FILE__
, __LINE__
, _("unknown type alignment"));
3473 case TYPE_CODE_ENUM
:
3477 case TYPE_CODE_RANGE
:
3479 case TYPE_CODE_CHAR
:
3480 case TYPE_CODE_BOOL
:
3481 return TYPE_LENGTH (t
);
3483 case TYPE_CODE_ARRAY
:
3484 case TYPE_CODE_COMPLEX
:
3485 /* TODO: What about vector types? */
3486 return arm_type_align (TYPE_TARGET_TYPE (t
));
3488 case TYPE_CODE_STRUCT
:
3489 case TYPE_CODE_UNION
:
3491 for (n
= 0; n
< TYPE_NFIELDS (t
); n
++)
3493 falign
= arm_type_align (TYPE_FIELD_TYPE (t
, n
));
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,
  VFP_CPRC_SINGLE,
  VFP_CPRC_DOUBLE,
  VFP_CPRC_VEC64,
  VFP_CPRC_VEC128
};
3516 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b
)
3520 case VFP_CPRC_SINGLE
:
3522 case VFP_CPRC_DOUBLE
:
3524 case VFP_CPRC_VEC64
:
3526 case VFP_CPRC_VEC128
:
3529 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3534 /* The character ('s', 'd' or 'q') for the type of VFP register used
3535 for passing base type B. */
3538 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b
)
3542 case VFP_CPRC_SINGLE
:
3544 case VFP_CPRC_DOUBLE
:
3546 case VFP_CPRC_VEC64
:
3548 case VFP_CPRC_VEC128
:
3551 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3556 /* Determine whether T may be part of a candidate for passing and
3557 returning in VFP registers, ignoring the limit on the total number
3558 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3559 classification of the first valid component found; if it is not
3560 VFP_CPRC_UNKNOWN, all components must have the same classification
3561 as *BASE_TYPE. If it is found that T contains a type not permitted
3562 for passing and returning in VFP registers, a type differently
3563 classified from *BASE_TYPE, or two types differently classified
3564 from each other, return -1, otherwise return the total number of
3565 base-type elements found (possibly 0 in an empty structure or
3566 array). Vector types are not currently supported, matching the
3567 generic AAPCS support. */
3570 arm_vfp_cprc_sub_candidate (struct type
*t
,
3571 enum arm_vfp_cprc_base_type
*base_type
)
3573 t
= check_typedef (t
);
3574 switch (TYPE_CODE (t
))
3577 switch (TYPE_LENGTH (t
))
3580 if (*base_type
== VFP_CPRC_UNKNOWN
)
3581 *base_type
= VFP_CPRC_SINGLE
;
3582 else if (*base_type
!= VFP_CPRC_SINGLE
)
3587 if (*base_type
== VFP_CPRC_UNKNOWN
)
3588 *base_type
= VFP_CPRC_DOUBLE
;
3589 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3598 case TYPE_CODE_COMPLEX
:
3599 /* Arguments of complex T where T is one of the types float or
3600 double get treated as if they are implemented as:
3609 switch (TYPE_LENGTH (t
))
3612 if (*base_type
== VFP_CPRC_UNKNOWN
)
3613 *base_type
= VFP_CPRC_SINGLE
;
3614 else if (*base_type
!= VFP_CPRC_SINGLE
)
3619 if (*base_type
== VFP_CPRC_UNKNOWN
)
3620 *base_type
= VFP_CPRC_DOUBLE
;
3621 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3630 case TYPE_CODE_ARRAY
:
3634 count
= arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t
), base_type
);
3637 if (TYPE_LENGTH (t
) == 0)
3639 gdb_assert (count
== 0);
3642 else if (count
== 0)
3644 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3645 gdb_assert ((TYPE_LENGTH (t
) % unitlen
) == 0);
3646 return TYPE_LENGTH (t
) / unitlen
;
3650 case TYPE_CODE_STRUCT
:
3655 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3657 int sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3659 if (sub_count
== -1)
3663 if (TYPE_LENGTH (t
) == 0)
3665 gdb_assert (count
== 0);
3668 else if (count
== 0)
3670 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3671 if (TYPE_LENGTH (t
) != unitlen
* count
)
3676 case TYPE_CODE_UNION
:
3681 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3683 int sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3685 if (sub_count
== -1)
3687 count
= (count
> sub_count
? count
: sub_count
);
3689 if (TYPE_LENGTH (t
) == 0)
3691 gdb_assert (count
== 0);
3694 else if (count
== 0)
3696 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3697 if (TYPE_LENGTH (t
) != unitlen
* count
)
3709 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3710 if passed to or returned from a non-variadic function with the VFP
3711 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3712 *BASE_TYPE to the base type for T and *COUNT to the number of
3713 elements of that base type before returning. */
3716 arm_vfp_call_candidate (struct type
*t
, enum arm_vfp_cprc_base_type
*base_type
,
3719 enum arm_vfp_cprc_base_type b
= VFP_CPRC_UNKNOWN
;
3720 int c
= arm_vfp_cprc_sub_candidate (t
, &b
);
3721 if (c
<= 0 || c
> 4)
3728 /* Return 1 if the VFP ABI should be used for passing arguments to and
3729 returning values from a function of type FUNC_TYPE, 0
3733 arm_vfp_abi_for_function (struct gdbarch
*gdbarch
, struct type
*func_type
)
3735 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3736 /* Variadic functions always use the base ABI. Assume that functions
3737 without debug info are not variadic. */
3738 if (func_type
&& TYPE_VARARGS (check_typedef (func_type
)))
3740 /* The VFP ABI is only supported as a variant of AAPCS. */
3741 if (tdep
->arm_abi
!= ARM_ABI_AAPCS
)
3743 return gdbarch_tdep (gdbarch
)->fp_model
== ARM_FLOAT_VFP
;
3746 /* We currently only support passing parameters in integer registers, which
3747 conforms with GCC's default model, and VFP argument passing following
3748 the VFP variant of AAPCS. Several other variants exist and
3749 we should probably support some of them based on the selected ABI. */
3752 arm_push_dummy_call (struct gdbarch
*gdbarch
, struct value
*function
,
3753 struct regcache
*regcache
, CORE_ADDR bp_addr
, int nargs
,
3754 struct value
**args
, CORE_ADDR sp
, int struct_return
,
3755 CORE_ADDR struct_addr
)
3757 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
3761 struct stack_item
*si
= NULL
;
3764 unsigned vfp_regs_free
= (1 << 16) - 1;
3766 /* Determine the type of this function and whether the VFP ABI
3768 ftype
= check_typedef (value_type (function
));
3769 if (TYPE_CODE (ftype
) == TYPE_CODE_PTR
)
3770 ftype
= check_typedef (TYPE_TARGET_TYPE (ftype
));
3771 use_vfp_abi
= arm_vfp_abi_for_function (gdbarch
, ftype
);
3773 /* Set the return address. For the ARM, the return breakpoint is
3774 always at BP_ADDR. */
3775 if (arm_pc_is_thumb (gdbarch
, bp_addr
))
3777 regcache_cooked_write_unsigned (regcache
, ARM_LR_REGNUM
, bp_addr
);
3779 /* Walk through the list of args and determine how large a temporary
3780 stack is required. Need to take care here as structs may be
3781 passed on the stack, and we have to push them. */
3784 argreg
= ARM_A1_REGNUM
;
3787 /* The struct_return pointer occupies the first parameter
3788 passing register. */
3792 fprintf_unfiltered (gdb_stdlog
, "struct return in %s = %s\n",
3793 gdbarch_register_name (gdbarch
, argreg
),
3794 paddress (gdbarch
, struct_addr
));
3795 regcache_cooked_write_unsigned (regcache
, argreg
, struct_addr
);
3799 for (argnum
= 0; argnum
< nargs
; argnum
++)
3802 struct type
*arg_type
;
3803 struct type
*target_type
;
3804 enum type_code typecode
;
3805 const bfd_byte
*val
;
3807 enum arm_vfp_cprc_base_type vfp_base_type
;
3809 int may_use_core_reg
= 1;
3811 arg_type
= check_typedef (value_type (args
[argnum
]));
3812 len
= TYPE_LENGTH (arg_type
);
3813 target_type
= TYPE_TARGET_TYPE (arg_type
);
3814 typecode
= TYPE_CODE (arg_type
);
3815 val
= value_contents (args
[argnum
]);
3817 align
= arm_type_align (arg_type
);
3818 /* Round alignment up to a whole number of words. */
3819 align
= (align
+ INT_REGISTER_SIZE
- 1) & ~(INT_REGISTER_SIZE
- 1);
3820 /* Different ABIs have different maximum alignments. */
3821 if (gdbarch_tdep (gdbarch
)->arm_abi
== ARM_ABI_APCS
)
3823 /* The APCS ABI only requires word alignment. */
3824 align
= INT_REGISTER_SIZE
;
3828 /* The AAPCS requires at most doubleword alignment. */
3829 if (align
> INT_REGISTER_SIZE
* 2)
3830 align
= INT_REGISTER_SIZE
* 2;
3834 && arm_vfp_call_candidate (arg_type
, &vfp_base_type
,
3842 /* Because this is a CPRC it cannot go in a core register or
3843 cause a core register to be skipped for alignment.
3844 Either it goes in VFP registers and the rest of this loop
3845 iteration is skipped for this argument, or it goes on the
3846 stack (and the stack alignment code is correct for this
3848 may_use_core_reg
= 0;
3850 unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
3851 shift
= unit_length
/ 4;
3852 mask
= (1 << (shift
* vfp_base_count
)) - 1;
3853 for (regno
= 0; regno
< 16; regno
+= shift
)
3854 if (((vfp_regs_free
>> regno
) & mask
) == mask
)
3863 vfp_regs_free
&= ~(mask
<< regno
);
3864 reg_scaled
= regno
/ shift
;
3865 reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
3866 for (i
= 0; i
< vfp_base_count
; i
++)
3870 if (reg_char
== 'q')
3871 arm_neon_quad_write (gdbarch
, regcache
, reg_scaled
+ i
,
3872 val
+ i
* unit_length
);
3875 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d",
3876 reg_char
, reg_scaled
+ i
);
3877 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
3879 regcache_cooked_write (regcache
, regnum
,
3880 val
+ i
* unit_length
);
3887 /* This CPRC could not go in VFP registers, so all VFP
3888 registers are now marked as used. */
3893 /* Push stack padding for dowubleword alignment. */
3894 if (nstack
& (align
- 1))
3896 si
= push_stack_item (si
, val
, INT_REGISTER_SIZE
);
3897 nstack
+= INT_REGISTER_SIZE
;
3900 /* Doubleword aligned quantities must go in even register pairs. */
3901 if (may_use_core_reg
3902 && argreg
<= ARM_LAST_ARG_REGNUM
3903 && align
> INT_REGISTER_SIZE
3907 /* If the argument is a pointer to a function, and it is a
3908 Thumb function, create a LOCAL copy of the value and set
3909 the THUMB bit in it. */
3910 if (TYPE_CODE_PTR
== typecode
3911 && target_type
!= NULL
3912 && TYPE_CODE_FUNC
== TYPE_CODE (check_typedef (target_type
)))
3914 CORE_ADDR regval
= extract_unsigned_integer (val
, len
, byte_order
);
3915 if (arm_pc_is_thumb (gdbarch
, regval
))
3917 bfd_byte
*copy
= alloca (len
);
3918 store_unsigned_integer (copy
, len
, byte_order
,
3919 MAKE_THUMB_ADDR (regval
));
3924 /* Copy the argument to general registers or the stack in
3925 register-sized pieces. Large arguments are split between
3926 registers and stack. */
3929 int partial_len
= len
< INT_REGISTER_SIZE
? len
: INT_REGISTER_SIZE
;
3931 if (may_use_core_reg
&& argreg
<= ARM_LAST_ARG_REGNUM
)
3933 /* The argument is being passed in a general purpose
3936 = extract_unsigned_integer (val
, partial_len
, byte_order
);
3937 if (byte_order
== BFD_ENDIAN_BIG
)
3938 regval
<<= (INT_REGISTER_SIZE
- partial_len
) * 8;
3940 fprintf_unfiltered (gdb_stdlog
, "arg %d in %s = 0x%s\n",
3942 gdbarch_register_name
3944 phex (regval
, INT_REGISTER_SIZE
));
3945 regcache_cooked_write_unsigned (regcache
, argreg
, regval
);
3950 /* Push the arguments onto the stack. */
3952 fprintf_unfiltered (gdb_stdlog
, "arg %d @ sp + %d\n",
3954 si
= push_stack_item (si
, val
, INT_REGISTER_SIZE
);
3955 nstack
+= INT_REGISTER_SIZE
;
3962 /* If we have an odd number of words to push, then decrement the stack
3963 by one word now, so first stack argument will be dword aligned. */
3970 write_memory (sp
, si
->data
, si
->len
);
3971 si
= pop_stack_item (si
);
3974 /* Finally, update teh SP register. */
3975 regcache_cooked_write_unsigned (regcache
, ARM_SP_REGNUM
, sp
);
3981 /* Always align the frame to an 8-byte boundary. This is required on
3982 some platforms and harmless on the rest. */
3985 arm_frame_align (struct gdbarch
*gdbarch
, CORE_ADDR sp
)
3987 /* Align the stack to eight bytes. */
3988 return sp
& ~ (CORE_ADDR
) 7;
/* Print the FPA status-register exception FLAGS (low five bits) to
   FILE as their conventional mnemonics, followed by a newline.  */

static void
print_fpu_flags (struct ui_file *file, int flags)
{
  if (flags & (1 << 0))
    fputs_filtered ("IVO ", file);	/* Invalid operation.  */
  if (flags & (1 << 1))
    fputs_filtered ("DVZ ", file);	/* Divide by zero.  */
  if (flags & (1 << 2))
    fputs_filtered ("OFL ", file);	/* Overflow.  */
  if (flags & (1 << 3))
    fputs_filtered ("UFL ", file);	/* Underflow.  */
  if (flags & (1 << 4))
    fputs_filtered ("INX ", file);	/* Inexact.  */
  fputc_filtered ('\n', file);
}
4007 /* Print interesting information about the floating point processor
4008 (if present) or emulator. */
4010 arm_print_float_info (struct gdbarch
*gdbarch
, struct ui_file
*file
,
4011 struct frame_info
*frame
, const char *args
)
4013 unsigned long status
= get_frame_register_unsigned (frame
, ARM_FPS_REGNUM
);
4016 type
= (status
>> 24) & 127;
4017 if (status
& (1 << 31))
4018 fprintf_filtered (file
, _("Hardware FPU type %d\n"), type
);
4020 fprintf_filtered (file
, _("Software FPU type %d\n"), type
);
4021 /* i18n: [floating point unit] mask */
4022 fputs_filtered (_("mask: "), file
);
4023 print_fpu_flags (file
, status
>> 16);
4024 /* i18n: [floating point unit] flags */
4025 fputs_filtered (_("flags: "), file
);
4026 print_fpu_flags (file
, status
);
4029 /* Construct the ARM extended floating point type. */
4030 static struct type
*
4031 arm_ext_type (struct gdbarch
*gdbarch
)
4033 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4035 if (!tdep
->arm_ext_type
)
4037 = arch_float_type (gdbarch
, -1, "builtin_type_arm_ext",
4038 floatformats_arm_ext
);
4040 return tdep
->arm_ext_type
;
4043 static struct type
*
4044 arm_neon_double_type (struct gdbarch
*gdbarch
)
4046 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4048 if (tdep
->neon_double_type
== NULL
)
4050 struct type
*t
, *elem
;
4052 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_d",
4054 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4055 append_composite_type_field (t
, "u8", init_vector_type (elem
, 8));
4056 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4057 append_composite_type_field (t
, "u16", init_vector_type (elem
, 4));
4058 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4059 append_composite_type_field (t
, "u32", init_vector_type (elem
, 2));
4060 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4061 append_composite_type_field (t
, "u64", elem
);
4062 elem
= builtin_type (gdbarch
)->builtin_float
;
4063 append_composite_type_field (t
, "f32", init_vector_type (elem
, 2));
4064 elem
= builtin_type (gdbarch
)->builtin_double
;
4065 append_composite_type_field (t
, "f64", elem
);
4067 TYPE_VECTOR (t
) = 1;
4068 TYPE_NAME (t
) = "neon_d";
4069 tdep
->neon_double_type
= t
;
4072 return tdep
->neon_double_type
;
4075 /* FIXME: The vector types are not correctly ordered on big-endian
4076 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4077 bits of d0 - regardless of what unit size is being held in d0. So
4078 the offset of the first uint8 in d0 is 7, but the offset of the
4079 first float is 4. This code works as-is for little-endian
4082 static struct type
*
4083 arm_neon_quad_type (struct gdbarch
*gdbarch
)
4085 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4087 if (tdep
->neon_quad_type
== NULL
)
4089 struct type
*t
, *elem
;
4091 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_q",
4093 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4094 append_composite_type_field (t
, "u8", init_vector_type (elem
, 16));
4095 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4096 append_composite_type_field (t
, "u16", init_vector_type (elem
, 8));
4097 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4098 append_composite_type_field (t
, "u32", init_vector_type (elem
, 4));
4099 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4100 append_composite_type_field (t
, "u64", init_vector_type (elem
, 2));
4101 elem
= builtin_type (gdbarch
)->builtin_float
;
4102 append_composite_type_field (t
, "f32", init_vector_type (elem
, 4));
4103 elem
= builtin_type (gdbarch
)->builtin_double
;
4104 append_composite_type_field (t
, "f64", init_vector_type (elem
, 2));
4106 TYPE_VECTOR (t
) = 1;
4107 TYPE_NAME (t
) = "neon_q";
4108 tdep
->neon_quad_type
= t
;
4111 return tdep
->neon_quad_type
;
4114 /* Return the GDB type object for the "standard" data type of data in
4117 static struct type
*
4118 arm_register_type (struct gdbarch
*gdbarch
, int regnum
)
4120 int num_regs
= gdbarch_num_regs (gdbarch
);
4122 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
4123 && regnum
>= num_regs
&& regnum
< num_regs
+ 32)
4124 return builtin_type (gdbarch
)->builtin_float
;
4126 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
4127 && regnum
>= num_regs
+ 32 && regnum
< num_regs
+ 32 + 16)
4128 return arm_neon_quad_type (gdbarch
);
4130 /* If the target description has register information, we are only
4131 in this function so that we can override the types of
4132 double-precision registers for NEON. */
4133 if (tdesc_has_registers (gdbarch_target_desc (gdbarch
)))
4135 struct type
*t
= tdesc_register_type (gdbarch
, regnum
);
4137 if (regnum
>= ARM_D0_REGNUM
&& regnum
< ARM_D0_REGNUM
+ 32
4138 && TYPE_CODE (t
) == TYPE_CODE_FLT
4139 && gdbarch_tdep (gdbarch
)->have_neon
)
4140 return arm_neon_double_type (gdbarch
);
4145 if (regnum
>= ARM_F0_REGNUM
&& regnum
< ARM_F0_REGNUM
+ NUM_FREGS
)
4147 if (!gdbarch_tdep (gdbarch
)->have_fpa_registers
)
4148 return builtin_type (gdbarch
)->builtin_void
;
4150 return arm_ext_type (gdbarch
);
4152 else if (regnum
== ARM_SP_REGNUM
)
4153 return builtin_type (gdbarch
)->builtin_data_ptr
;
4154 else if (regnum
== ARM_PC_REGNUM
)
4155 return builtin_type (gdbarch
)->builtin_func_ptr
;
4156 else if (regnum
>= ARRAY_SIZE (arm_register_names
))
4157 /* These registers are only supported on targets which supply
4158 an XML description. */
4159 return builtin_type (gdbarch
)->builtin_int0
;
4161 return builtin_type (gdbarch
)->builtin_uint32
;
4164 /* Map a DWARF register REGNUM onto the appropriate GDB register
4168 arm_dwarf_reg_to_regnum (struct gdbarch
*gdbarch
, int reg
)
4170 /* Core integer regs. */
4171 if (reg
>= 0 && reg
<= 15)
4174 /* Legacy FPA encoding. These were once used in a way which
4175 overlapped with VFP register numbering, so their use is
4176 discouraged, but GDB doesn't support the ARM toolchain
4177 which used them for VFP. */
4178 if (reg
>= 16 && reg
<= 23)
4179 return ARM_F0_REGNUM
+ reg
- 16;
4181 /* New assignments for the FPA registers. */
4182 if (reg
>= 96 && reg
<= 103)
4183 return ARM_F0_REGNUM
+ reg
- 96;
4185 /* WMMX register assignments. */
4186 if (reg
>= 104 && reg
<= 111)
4187 return ARM_WCGR0_REGNUM
+ reg
- 104;
4189 if (reg
>= 112 && reg
<= 127)
4190 return ARM_WR0_REGNUM
+ reg
- 112;
4192 if (reg
>= 192 && reg
<= 199)
4193 return ARM_WC0_REGNUM
+ reg
- 192;
4195 /* VFP v2 registers. A double precision value is actually
4196 in d1 rather than s2, but the ABI only defines numbering
4197 for the single precision registers. This will "just work"
4198 in GDB for little endian targets (we'll read eight bytes,
4199 starting in s0 and then progressing to s1), but will be
4200 reversed on big endian targets with VFP. This won't
4201 be a problem for the new Neon quad registers; you're supposed
4202 to use DW_OP_piece for those. */
4203 if (reg
>= 64 && reg
<= 95)
4207 xsnprintf (name_buf
, sizeof (name_buf
), "s%d", reg
- 64);
4208 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4212 /* VFP v3 / Neon registers. This range is also used for VFP v2
4213 registers, except that it now describes d0 instead of s0. */
4214 if (reg
>= 256 && reg
<= 287)
4218 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", reg
- 256);
4219 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4226 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4228 arm_register_sim_regno (struct gdbarch
*gdbarch
, int regnum
)
4231 gdb_assert (reg
>= 0 && reg
< gdbarch_num_regs (gdbarch
));
4233 if (regnum
>= ARM_WR0_REGNUM
&& regnum
<= ARM_WR15_REGNUM
)
4234 return regnum
- ARM_WR0_REGNUM
+ SIM_ARM_IWMMXT_COP0R0_REGNUM
;
4236 if (regnum
>= ARM_WC0_REGNUM
&& regnum
<= ARM_WC7_REGNUM
)
4237 return regnum
- ARM_WC0_REGNUM
+ SIM_ARM_IWMMXT_COP1R0_REGNUM
;
4239 if (regnum
>= ARM_WCGR0_REGNUM
&& regnum
<= ARM_WCGR7_REGNUM
)
4240 return regnum
- ARM_WCGR0_REGNUM
+ SIM_ARM_IWMMXT_COP1R8_REGNUM
;
4242 if (reg
< NUM_GREGS
)
4243 return SIM_ARM_R0_REGNUM
+ reg
;
4246 if (reg
< NUM_FREGS
)
4247 return SIM_ARM_FP0_REGNUM
+ reg
;
4250 if (reg
< NUM_SREGS
)
4251 return SIM_ARM_FPS_REGNUM
+ reg
;
4254 internal_error (__FILE__
, __LINE__
, _("Bad REGNUM %d"), regnum
);
4257 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4258 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4259 It is thought that this is is the floating-point register format on
4260 little-endian systems. */
4263 convert_from_extended (const struct floatformat
*fmt
, const void *ptr
,
4264 void *dbl
, int endianess
)
4268 if (endianess
== BFD_ENDIAN_BIG
)
4269 floatformat_to_doublest (&floatformat_arm_ext_big
, ptr
, &d
);
4271 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword
,
4273 floatformat_from_doublest (fmt
, &d
, dbl
);
4277 convert_to_extended (const struct floatformat
*fmt
, void *dbl
, const void *ptr
,
4282 floatformat_to_doublest (fmt
, ptr
, &d
);
4283 if (endianess
== BFD_ENDIAN_BIG
)
4284 floatformat_from_doublest (&floatformat_arm_ext_big
, &d
, dbl
);
4286 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword
,
4291 condition_true (unsigned long cond
, unsigned long status_reg
)
4293 if (cond
== INST_AL
|| cond
== INST_NV
)
4299 return ((status_reg
& FLAG_Z
) != 0);
4301 return ((status_reg
& FLAG_Z
) == 0);
4303 return ((status_reg
& FLAG_C
) != 0);
4305 return ((status_reg
& FLAG_C
) == 0);
4307 return ((status_reg
& FLAG_N
) != 0);
4309 return ((status_reg
& FLAG_N
) == 0);
4311 return ((status_reg
& FLAG_V
) != 0);
4313 return ((status_reg
& FLAG_V
) == 0);
4315 return ((status_reg
& (FLAG_C
| FLAG_Z
)) == FLAG_C
);
4317 return ((status_reg
& (FLAG_C
| FLAG_Z
)) != FLAG_C
);
4319 return (((status_reg
& FLAG_N
) == 0) == ((status_reg
& FLAG_V
) == 0));
4321 return (((status_reg
& FLAG_N
) == 0) != ((status_reg
& FLAG_V
) == 0));
4323 return (((status_reg
& FLAG_Z
) == 0)
4324 && (((status_reg
& FLAG_N
) == 0)
4325 == ((status_reg
& FLAG_V
) == 0)));
4327 return (((status_reg
& FLAG_Z
) != 0)
4328 || (((status_reg
& FLAG_N
) == 0)
4329 != ((status_reg
& FLAG_V
) == 0)));
4334 static unsigned long
4335 shifted_reg_val (struct frame_info
*frame
, unsigned long inst
, int carry
,
4336 unsigned long pc_val
, unsigned long status_reg
)
4338 unsigned long res
, shift
;
4339 int rm
= bits (inst
, 0, 3);
4340 unsigned long shifttype
= bits (inst
, 5, 6);
4344 int rs
= bits (inst
, 8, 11);
4345 shift
= (rs
== 15 ? pc_val
+ 8
4346 : get_frame_register_unsigned (frame
, rs
)) & 0xFF;
4349 shift
= bits (inst
, 7, 11);
4351 res
= (rm
== ARM_PC_REGNUM
4352 ? (pc_val
+ (bit (inst
, 4) ? 12 : 8))
4353 : get_frame_register_unsigned (frame
, rm
));
4358 res
= shift
>= 32 ? 0 : res
<< shift
;
4362 res
= shift
>= 32 ? 0 : res
>> shift
;
4368 res
= ((res
& 0x80000000L
)
4369 ? ~((~res
) >> shift
) : res
>> shift
);
4372 case 3: /* ROR/RRX */
4375 res
= (res
>> 1) | (carry
? 0x80000000L
: 0);
4377 res
= (res
>> shift
) | (res
<< (32 - shift
));
4381 return res
& 0xffffffff;
/* Return number of 1-bits in VAL.  */

static int
bitcount (unsigned long val)
{
  int nbits;
  /* Kernighan's trick: each iteration clears the lowest set bit.  */
  for (nbits = 0; val != 0; nbits++)
    val &= val - 1;		/* Delete rightmost 1-bit in val.  */
  return nbits;
}
/* Return the size in bytes of the complete Thumb instruction whose
   first halfword is INST1.  */

static int
thumb_insn_size (unsigned short inst1)
{
  /* A first halfword of 0b11101..., 0b11110... or 0b11111... starts a
     32-bit Thumb-2 instruction; everything else is 16 bits.  */
  if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
    return 4;
  else
    return 2;
}
/* Advance the reconstructed IT state ITSTATE past one instruction and
   return the new state.  */

static unsigned int
thumb_advance_itstate (unsigned int itstate)
{
  /* Preserve IT[7:5], the first three bits of the condition.  Shift
     the upcoming condition flags left by one bit.  */
  itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);

  /* If we have finished the IT block, clear the state.  */
  if ((itstate & 0x0f) == 0)
    itstate = 0;

  return itstate;
}
4421 /* Find the next PC after the current instruction executes. In some
4422 cases we can not statically determine the answer (see the IT state
4423 handling in this function); in that case, a breakpoint may be
4424 inserted in addition to the returned PC, which will be used to set
4425 another breakpoint by our caller. */
4428 thumb_get_next_pc_raw (struct frame_info
*frame
, CORE_ADDR pc
)
4430 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
4431 struct address_space
*aspace
= get_frame_address_space (frame
);
4432 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
4433 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
4434 unsigned long pc_val
= ((unsigned long) pc
) + 4; /* PC after prefetch */
4435 unsigned short inst1
;
4436 CORE_ADDR nextpc
= pc
+ 2; /* Default is next instruction. */
4437 unsigned long offset
;
4438 ULONGEST status
, itstate
;
4440 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4441 pc_val
= MAKE_THUMB_ADDR (pc_val
);
4443 inst1
= read_memory_unsigned_integer (pc
, 2, byte_order_for_code
);
4445 /* Thumb-2 conditional execution support. There are eight bits in
4446 the CPSR which describe conditional execution state. Once
4447 reconstructed (they're in a funny order), the low five bits
4448 describe the low bit of the condition for each instruction and
4449 how many instructions remain. The high three bits describe the
4450 base condition. One of the low four bits will be set if an IT
4451 block is active. These bits read as zero on earlier
4453 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
4454 itstate
= ((status
>> 8) & 0xfc) | ((status
>> 25) & 0x3);
4456 /* If-Then handling. On GNU/Linux, where this routine is used, we
4457 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4458 can disable execution of the undefined instruction. So we might
4459 miss the breakpoint if we set it on a skipped conditional
4460 instruction. Because conditional instructions can change the
4461 flags, affecting the execution of further instructions, we may
4462 need to set two breakpoints. */
4464 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
!= NULL
)
4466 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4468 /* An IT instruction. Because this instruction does not
4469 modify the flags, we can accurately predict the next
4470 executed instruction. */
4471 itstate
= inst1
& 0x00ff;
4472 pc
+= thumb_insn_size (inst1
);
4474 while (itstate
!= 0 && ! condition_true (itstate
>> 4, status
))
4476 inst1
= read_memory_unsigned_integer (pc
, 2,
4477 byte_order_for_code
);
4478 pc
+= thumb_insn_size (inst1
);
4479 itstate
= thumb_advance_itstate (itstate
);
4482 return MAKE_THUMB_ADDR (pc
);
4484 else if (itstate
!= 0)
4486 /* We are in a conditional block. Check the condition. */
4487 if (! condition_true (itstate
>> 4, status
))
4489 /* Advance to the next executed instruction. */
4490 pc
+= thumb_insn_size (inst1
);
4491 itstate
= thumb_advance_itstate (itstate
);
4493 while (itstate
!= 0 && ! condition_true (itstate
>> 4, status
))
4495 inst1
= read_memory_unsigned_integer (pc
, 2,
4496 byte_order_for_code
);
4497 pc
+= thumb_insn_size (inst1
);
4498 itstate
= thumb_advance_itstate (itstate
);
4501 return MAKE_THUMB_ADDR (pc
);
4503 else if ((itstate
& 0x0f) == 0x08)
4505 /* This is the last instruction of the conditional
4506 block, and it is executed. We can handle it normally
4507 because the following instruction is not conditional,
4508 and we must handle it normally because it is
4509 permitted to branch. Fall through. */
4515 /* There are conditional instructions after this one.
4516 If this instruction modifies the flags, then we can
4517 not predict what the next executed instruction will
4518 be. Fortunately, this instruction is architecturally
4519 forbidden to branch; we know it will fall through.
4520 Start by skipping past it. */
4521 pc
+= thumb_insn_size (inst1
);
4522 itstate
= thumb_advance_itstate (itstate
);
4524 /* Set a breakpoint on the following instruction. */
4525 gdb_assert ((itstate
& 0x0f) != 0);
4526 arm_insert_single_step_breakpoint (gdbarch
, aspace
,
4527 MAKE_THUMB_ADDR (pc
));
4528 cond_negated
= (itstate
>> 4) & 1;
4530 /* Skip all following instructions with the same
4531 condition. If there is a later instruction in the IT
4532 block with the opposite condition, set the other
4533 breakpoint there. If not, then set a breakpoint on
4534 the instruction after the IT block. */
4537 inst1
= read_memory_unsigned_integer (pc
, 2,
4538 byte_order_for_code
);
4539 pc
+= thumb_insn_size (inst1
);
4540 itstate
= thumb_advance_itstate (itstate
);
4542 while (itstate
!= 0 && ((itstate
>> 4) & 1) == cond_negated
);
4544 return MAKE_THUMB_ADDR (pc
);
4548 else if (itstate
& 0x0f)
4550 /* We are in a conditional block. Check the condition. */
4551 int cond
= itstate
>> 4;
4553 if (! condition_true (cond
, status
))
4554 /* Advance to the next instruction. All the 32-bit
4555 instructions share a common prefix. */
4556 return MAKE_THUMB_ADDR (pc
+ thumb_insn_size (inst1
));
4558 /* Otherwise, handle the instruction normally. */
4561 if ((inst1
& 0xff00) == 0xbd00) /* pop {rlist, pc} */
4565 /* Fetch the saved PC from the stack. It's stored above
4566 all of the other registers. */
4567 offset
= bitcount (bits (inst1
, 0, 7)) * INT_REGISTER_SIZE
;
4568 sp
= get_frame_register_unsigned (frame
, ARM_SP_REGNUM
);
4569 nextpc
= read_memory_unsigned_integer (sp
+ offset
, 4, byte_order
);
4571 else if ((inst1
& 0xf000) == 0xd000) /* conditional branch */
4573 unsigned long cond
= bits (inst1
, 8, 11);
4574 if (cond
== 0x0f) /* 0x0f = SWI */
4576 struct gdbarch_tdep
*tdep
;
4577 tdep
= gdbarch_tdep (gdbarch
);
4579 if (tdep
->syscall_next_pc
!= NULL
)
4580 nextpc
= tdep
->syscall_next_pc (frame
);
4583 else if (cond
!= 0x0f && condition_true (cond
, status
))
4584 nextpc
= pc_val
+ (sbits (inst1
, 0, 7) << 1);
4586 else if ((inst1
& 0xf800) == 0xe000) /* unconditional branch */
4588 nextpc
= pc_val
+ (sbits (inst1
, 0, 10) << 1);
4590 else if (thumb_insn_size (inst1
) == 4) /* 32-bit instruction */
4592 unsigned short inst2
;
4593 inst2
= read_memory_unsigned_integer (pc
+ 2, 2, byte_order_for_code
);
4595 /* Default to the next instruction. */
4597 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4599 if ((inst1
& 0xf800) == 0xf000 && (inst2
& 0x8000) == 0x8000)
4601 /* Branches and miscellaneous control instructions. */
4603 if ((inst2
& 0x1000) != 0 || (inst2
& 0xd001) == 0xc000)
4606 int j1
, j2
, imm1
, imm2
;
4608 imm1
= sbits (inst1
, 0, 10);
4609 imm2
= bits (inst2
, 0, 10);
4610 j1
= bit (inst2
, 13);
4611 j2
= bit (inst2
, 11);
4613 offset
= ((imm1
<< 12) + (imm2
<< 1));
4614 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
4616 nextpc
= pc_val
+ offset
;
4617 /* For BLX make sure to clear the low bits. */
4618 if (bit (inst2
, 12) == 0)
4619 nextpc
= nextpc
& 0xfffffffc;
4621 else if (inst1
== 0xf3de && (inst2
& 0xff00) == 0x3f00)
4623 /* SUBS PC, LR, #imm8. */
4624 nextpc
= get_frame_register_unsigned (frame
, ARM_LR_REGNUM
);
4625 nextpc
-= inst2
& 0x00ff;
4627 else if ((inst2
& 0xd000) == 0x8000 && (inst1
& 0x0380) != 0x0380)
4629 /* Conditional branch. */
4630 if (condition_true (bits (inst1
, 6, 9), status
))
4632 int sign
, j1
, j2
, imm1
, imm2
;
4634 sign
= sbits (inst1
, 10, 10);
4635 imm1
= bits (inst1
, 0, 5);
4636 imm2
= bits (inst2
, 0, 10);
4637 j1
= bit (inst2
, 13);
4638 j2
= bit (inst2
, 11);
4640 offset
= (sign
<< 20) + (j2
<< 19) + (j1
<< 18);
4641 offset
+= (imm1
<< 12) + (imm2
<< 1);
4643 nextpc
= pc_val
+ offset
;
4647 else if ((inst1
& 0xfe50) == 0xe810)
4649 /* Load multiple or RFE. */
4650 int rn
, offset
, load_pc
= 1;
4652 rn
= bits (inst1
, 0, 3);
4653 if (bit (inst1
, 7) && !bit (inst1
, 8))
4656 if (!bit (inst2
, 15))
4658 offset
= bitcount (inst2
) * 4 - 4;
4660 else if (!bit (inst1
, 7) && bit (inst1
, 8))
4663 if (!bit (inst2
, 15))
4667 else if (bit (inst1
, 7) && bit (inst1
, 8))
4672 else if (!bit (inst1
, 7) && !bit (inst1
, 8))
4682 CORE_ADDR addr
= get_frame_register_unsigned (frame
, rn
);
4683 nextpc
= get_frame_memory_unsigned (frame
, addr
+ offset
, 4);
4686 else if ((inst1
& 0xffef) == 0xea4f && (inst2
& 0xfff0) == 0x0f00)
4688 /* MOV PC or MOVS PC. */
4689 nextpc
= get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4690 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4692 else if ((inst1
& 0xff70) == 0xf850 && (inst2
& 0xf000) == 0xf000)
4696 int rn
, load_pc
= 1;
4698 rn
= bits (inst1
, 0, 3);
4699 base
= get_frame_register_unsigned (frame
, rn
);
4700 if (rn
== ARM_PC_REGNUM
)
4702 base
= (base
+ 4) & ~(CORE_ADDR
) 0x3;
4704 base
+= bits (inst2
, 0, 11);
4706 base
-= bits (inst2
, 0, 11);
4708 else if (bit (inst1
, 7))
4709 base
+= bits (inst2
, 0, 11);
4710 else if (bit (inst2
, 11))
4712 if (bit (inst2
, 10))
4715 base
+= bits (inst2
, 0, 7);
4717 base
-= bits (inst2
, 0, 7);
4720 else if ((inst2
& 0x0fc0) == 0x0000)
4722 int shift
= bits (inst2
, 4, 5), rm
= bits (inst2
, 0, 3);
4723 base
+= get_frame_register_unsigned (frame
, rm
) << shift
;
4730 nextpc
= get_frame_memory_unsigned (frame
, base
, 4);
4732 else if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf000)
4735 CORE_ADDR tbl_reg
, table
, offset
, length
;
4737 tbl_reg
= bits (inst1
, 0, 3);
4738 if (tbl_reg
== 0x0f)
4739 table
= pc
+ 4; /* Regcache copy of PC isn't right yet. */
4741 table
= get_frame_register_unsigned (frame
, tbl_reg
);
4743 offset
= get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4744 length
= 2 * get_frame_memory_unsigned (frame
, table
+ offset
, 1);
4745 nextpc
= pc_val
+ length
;
4747 else if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf010)
4750 CORE_ADDR tbl_reg
, table
, offset
, length
;
4752 tbl_reg
= bits (inst1
, 0, 3);
4753 if (tbl_reg
== 0x0f)
4754 table
= pc
+ 4; /* Regcache copy of PC isn't right yet. */
4756 table
= get_frame_register_unsigned (frame
, tbl_reg
);
4758 offset
= 2 * get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4759 length
= 2 * get_frame_memory_unsigned (frame
, table
+ offset
, 2);
4760 nextpc
= pc_val
+ length
;
4763 else if ((inst1
& 0xff00) == 0x4700) /* bx REG, blx REG */
4765 if (bits (inst1
, 3, 6) == 0x0f)
4766 nextpc
= UNMAKE_THUMB_ADDR (pc_val
);
4768 nextpc
= get_frame_register_unsigned (frame
, bits (inst1
, 3, 6));
4770 else if ((inst1
& 0xff87) == 0x4687) /* mov pc, REG */
4772 if (bits (inst1
, 3, 6) == 0x0f)
4775 nextpc
= get_frame_register_unsigned (frame
, bits (inst1
, 3, 6));
4777 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4779 else if ((inst1
& 0xf500) == 0xb100)
4782 int imm
= (bit (inst1
, 9) << 6) + (bits (inst1
, 3, 7) << 1);
4783 ULONGEST reg
= get_frame_register_unsigned (frame
, bits (inst1
, 0, 2));
4785 if (bit (inst1
, 11) && reg
!= 0)
4786 nextpc
= pc_val
+ imm
;
4787 else if (!bit (inst1
, 11) && reg
== 0)
4788 nextpc
= pc_val
+ imm
;
4793 /* Get the raw next address. PC is the current program counter, in
4794 FRAME, which is assumed to be executing in ARM mode.
4796 The value returned has the execution state of the next instruction
4797 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4798 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4802 arm_get_next_pc_raw (struct frame_info
*frame
, CORE_ADDR pc
)
4804 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
4805 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
4806 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
4807 unsigned long pc_val
;
4808 unsigned long this_instr
;
4809 unsigned long status
;
4812 pc_val
= (unsigned long) pc
;
4813 this_instr
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
4815 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
4816 nextpc
= (CORE_ADDR
) (pc_val
+ 4); /* Default case */
4818 if (bits (this_instr
, 28, 31) == INST_NV
)
4819 switch (bits (this_instr
, 24, 27))
4824 /* Branch with Link and change to Thumb. */
4825 nextpc
= BranchDest (pc
, this_instr
);
4826 nextpc
|= bit (this_instr
, 24) << 1;
4827 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4833 /* Coprocessor register transfer. */
4834 if (bits (this_instr
, 12, 15) == 15)
4835 error (_("Invalid update to pc in instruction"));
4838 else if (condition_true (bits (this_instr
, 28, 31), status
))
4840 switch (bits (this_instr
, 24, 27))
4843 case 0x1: /* data processing */
4847 unsigned long operand1
, operand2
, result
= 0;
4851 if (bits (this_instr
, 12, 15) != 15)
4854 if (bits (this_instr
, 22, 25) == 0
4855 && bits (this_instr
, 4, 7) == 9) /* multiply */
4856 error (_("Invalid update to pc in instruction"));
4858 /* BX <reg>, BLX <reg> */
4859 if (bits (this_instr
, 4, 27) == 0x12fff1
4860 || bits (this_instr
, 4, 27) == 0x12fff3)
4862 rn
= bits (this_instr
, 0, 3);
4863 nextpc
= ((rn
== ARM_PC_REGNUM
)
4865 : get_frame_register_unsigned (frame
, rn
));
4870 /* Multiply into PC. */
4871 c
= (status
& FLAG_C
) ? 1 : 0;
4872 rn
= bits (this_instr
, 16, 19);
4873 operand1
= ((rn
== ARM_PC_REGNUM
)
4875 : get_frame_register_unsigned (frame
, rn
));
4877 if (bit (this_instr
, 25))
4879 unsigned long immval
= bits (this_instr
, 0, 7);
4880 unsigned long rotate
= 2 * bits (this_instr
, 8, 11);
4881 operand2
= ((immval
>> rotate
) | (immval
<< (32 - rotate
)))
4884 else /* operand 2 is a shifted register. */
4885 operand2
= shifted_reg_val (frame
, this_instr
, c
,
4888 switch (bits (this_instr
, 21, 24))
4891 result
= operand1
& operand2
;
4895 result
= operand1
^ operand2
;
4899 result
= operand1
- operand2
;
4903 result
= operand2
- operand1
;
4907 result
= operand1
+ operand2
;
4911 result
= operand1
+ operand2
+ c
;
4915 result
= operand1
- operand2
+ c
;
4919 result
= operand2
- operand1
+ c
;
4925 case 0xb: /* tst, teq, cmp, cmn */
4926 result
= (unsigned long) nextpc
;
4930 result
= operand1
| operand2
;
4934 /* Always step into a function. */
4939 result
= operand1
& ~operand2
;
4947 /* In 26-bit APCS the bottom two bits of the result are
4948 ignored, and we always end up in ARM state. */
4950 nextpc
= arm_addr_bits_remove (gdbarch
, result
);
4958 case 0x5: /* data transfer */
4961 if (bit (this_instr
, 20))
4964 if (bits (this_instr
, 12, 15) == 15)
4970 if (bit (this_instr
, 22))
4971 error (_("Invalid update to pc in instruction"));
4973 /* byte write to PC */
4974 rn
= bits (this_instr
, 16, 19);
4975 base
= ((rn
== ARM_PC_REGNUM
)
4977 : get_frame_register_unsigned (frame
, rn
));
4979 if (bit (this_instr
, 24))
4982 int c
= (status
& FLAG_C
) ? 1 : 0;
4983 unsigned long offset
=
4984 (bit (this_instr
, 25)
4985 ? shifted_reg_val (frame
, this_instr
, c
, pc_val
, status
)
4986 : bits (this_instr
, 0, 11));
4988 if (bit (this_instr
, 23))
4994 (CORE_ADDR
) read_memory_unsigned_integer ((CORE_ADDR
) base
,
5001 case 0x9: /* block transfer */
5002 if (bit (this_instr
, 20))
5005 if (bit (this_instr
, 15))
5009 unsigned long rn_val
5010 = get_frame_register_unsigned (frame
,
5011 bits (this_instr
, 16, 19));
5013 if (bit (this_instr
, 23))
5016 unsigned long reglist
= bits (this_instr
, 0, 14);
5017 offset
= bitcount (reglist
) * 4;
5018 if (bit (this_instr
, 24)) /* pre */
5021 else if (bit (this_instr
, 24))
5025 (CORE_ADDR
) read_memory_unsigned_integer ((CORE_ADDR
)
5032 case 0xb: /* branch & link */
5033 case 0xa: /* branch */
5035 nextpc
= BranchDest (pc
, this_instr
);
5041 case 0xe: /* coproc ops */
5045 struct gdbarch_tdep
*tdep
;
5046 tdep
= gdbarch_tdep (gdbarch
);
5048 if (tdep
->syscall_next_pc
!= NULL
)
5049 nextpc
= tdep
->syscall_next_pc (frame
);
5055 fprintf_filtered (gdb_stderr
, _("Bad bit-field extraction\n"));
5063 /* Determine next PC after current instruction executes. Will call either
5064 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5065 loop is detected. */
5068 arm_get_next_pc (struct frame_info
*frame
, CORE_ADDR pc
)
5072 if (arm_frame_is_thumb (frame
))
5073 nextpc
= thumb_get_next_pc_raw (frame
, pc
);
5075 nextpc
= arm_get_next_pc_raw (frame
, pc
);
5080 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5081 of the appropriate mode (as encoded in the PC value), even if this
5082 differs from what would be expected according to the symbol tables. */
5085 arm_insert_single_step_breakpoint (struct gdbarch
*gdbarch
,
5086 struct address_space
*aspace
,
5089 struct cleanup
*old_chain
5090 = make_cleanup_restore_integer (&arm_override_mode
);
5092 arm_override_mode
= IS_THUMB_ADDR (pc
);
5093 pc
= gdbarch_addr_bits_remove (gdbarch
, pc
);
5095 insert_single_step_breakpoint (gdbarch
, aspace
, pc
);
5097 do_cleanups (old_chain
);
5100 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5101 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5102 is found, attempt to step through it. A breakpoint is placed at the end of
5106 thumb_deal_with_atomic_sequence_raw (struct frame_info
*frame
)
5108 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
5109 struct address_space
*aspace
= get_frame_address_space (frame
);
5110 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
5111 CORE_ADDR pc
= get_frame_pc (frame
);
5112 CORE_ADDR breaks
[2] = {-1, -1};
5114 unsigned short insn1
, insn2
;
5117 int last_breakpoint
= 0; /* Defaults to 0 (no breakpoints placed). */
5118 const int atomic_sequence_length
= 16; /* Instruction sequence length. */
5119 ULONGEST status
, itstate
;
5121 /* We currently do not support atomic sequences within an IT block. */
5122 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
5123 itstate
= ((status
>> 8) & 0xfc) | ((status
>> 25) & 0x3);
5127 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5128 insn1
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5130 if (thumb_insn_size (insn1
) != 4)
5133 insn2
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5135 if (!((insn1
& 0xfff0) == 0xe850
5136 || ((insn1
& 0xfff0) == 0xe8d0 && (insn2
& 0x00c0) == 0x0040)))
5139 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5141 for (insn_count
= 0; insn_count
< atomic_sequence_length
; ++insn_count
)
5143 insn1
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5146 if (thumb_insn_size (insn1
) != 4)
5148 /* Assume that there is at most one conditional branch in the
5149 atomic sequence. If a conditional branch is found, put a
5150 breakpoint in its destination address. */
5151 if ((insn1
& 0xf000) == 0xd000 && bits (insn1
, 8, 11) != 0x0f)
5153 if (last_breakpoint
> 0)
5154 return 0; /* More than one conditional branch found,
5155 fallback to the standard code. */
5157 breaks
[1] = loc
+ 2 + (sbits (insn1
, 0, 7) << 1);
5161 /* We do not support atomic sequences that use any *other*
5162 instructions but conditional branches to change the PC.
5163 Fall back to standard code to avoid losing control of
5165 else if (thumb_instruction_changes_pc (insn1
))
5170 insn2
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5173 /* Assume that there is at most one conditional branch in the
5174 atomic sequence. If a conditional branch is found, put a
5175 breakpoint in its destination address. */
5176 if ((insn1
& 0xf800) == 0xf000
5177 && (insn2
& 0xd000) == 0x8000
5178 && (insn1
& 0x0380) != 0x0380)
5180 int sign
, j1
, j2
, imm1
, imm2
;
5181 unsigned int offset
;
5183 sign
= sbits (insn1
, 10, 10);
5184 imm1
= bits (insn1
, 0, 5);
5185 imm2
= bits (insn2
, 0, 10);
5186 j1
= bit (insn2
, 13);
5187 j2
= bit (insn2
, 11);
5189 offset
= (sign
<< 20) + (j2
<< 19) + (j1
<< 18);
5190 offset
+= (imm1
<< 12) + (imm2
<< 1);
5192 if (last_breakpoint
> 0)
5193 return 0; /* More than one conditional branch found,
5194 fallback to the standard code. */
5196 breaks
[1] = loc
+ offset
;
5200 /* We do not support atomic sequences that use any *other*
5201 instructions but conditional branches to change the PC.
5202 Fall back to standard code to avoid losing control of
5204 else if (thumb2_instruction_changes_pc (insn1
, insn2
))
5207 /* If we find a strex{,b,h,d}, we're done. */
5208 if ((insn1
& 0xfff0) == 0xe840
5209 || ((insn1
& 0xfff0) == 0xe8c0 && (insn2
& 0x00c0) == 0x0040))
5214 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5215 if (insn_count
== atomic_sequence_length
)
5218 /* Insert a breakpoint right after the end of the atomic sequence. */
5221 /* Check for duplicated breakpoints. Check also for a breakpoint
5222 placed (branch instruction's destination) anywhere in sequence. */
5224 && (breaks
[1] == breaks
[0]
5225 || (breaks
[1] >= pc
&& breaks
[1] < loc
)))
5226 last_breakpoint
= 0;
5228 /* Effectively inserts the breakpoints. */
5229 for (index
= 0; index
<= last_breakpoint
; index
++)
5230 arm_insert_single_step_breakpoint (gdbarch
, aspace
,
5231 MAKE_THUMB_ADDR (breaks
[index
]));
5237 arm_deal_with_atomic_sequence_raw (struct frame_info
*frame
)
5239 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
5240 struct address_space
*aspace
= get_frame_address_space (frame
);
5241 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
5242 CORE_ADDR pc
= get_frame_pc (frame
);
5243 CORE_ADDR breaks
[2] = {-1, -1};
5248 int last_breakpoint
= 0; /* Defaults to 0 (no breakpoints placed). */
5249 const int atomic_sequence_length
= 16; /* Instruction sequence length. */
5251 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5252 Note that we do not currently support conditionally executed atomic
5254 insn
= read_memory_unsigned_integer (loc
, 4, byte_order_for_code
);
5256 if ((insn
& 0xff9000f0) != 0xe1900090)
5259 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5261 for (insn_count
= 0; insn_count
< atomic_sequence_length
; ++insn_count
)
5263 insn
= read_memory_unsigned_integer (loc
, 4, byte_order_for_code
);
5266 /* Assume that there is at most one conditional branch in the atomic
5267 sequence. If a conditional branch is found, put a breakpoint in
5268 its destination address. */
5269 if (bits (insn
, 24, 27) == 0xa)
5271 if (last_breakpoint
> 0)
5272 return 0; /* More than one conditional branch found, fallback
5273 to the standard single-step code. */
5275 breaks
[1] = BranchDest (loc
- 4, insn
);
5279 /* We do not support atomic sequences that use any *other* instructions
5280 but conditional branches to change the PC. Fall back to standard
5281 code to avoid losing control of execution. */
5282 else if (arm_instruction_changes_pc (insn
))
5285 /* If we find a strex{,b,h,d}, we're done. */
5286 if ((insn
& 0xff9000f0) == 0xe1800090)
5290 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5291 if (insn_count
== atomic_sequence_length
)
5294 /* Insert a breakpoint right after the end of the atomic sequence. */
5297 /* Check for duplicated breakpoints. Check also for a breakpoint
5298 placed (branch instruction's destination) anywhere in sequence. */
5300 && (breaks
[1] == breaks
[0]
5301 || (breaks
[1] >= pc
&& breaks
[1] < loc
)))
5302 last_breakpoint
= 0;
5304 /* Effectively inserts the breakpoints. */
5305 for (index
= 0; index
<= last_breakpoint
; index
++)
5306 arm_insert_single_step_breakpoint (gdbarch
, aspace
, breaks
[index
]);
/* Dispatch atomic-sequence detection on the frame's execution state.
   Returns nonzero if single-step breakpoints were placed.  */

static int
arm_deal_with_atomic_sequence (struct frame_info *frame)
{
  if (arm_frame_is_thumb (frame))
    return thumb_deal_with_atomic_sequence_raw (frame);
  else
    return arm_deal_with_atomic_sequence_raw (frame);
}
5320 /* single_step() is called just before we want to resume the inferior,
5321 if we want to single-step it but there is no hardware or kernel
5322 single-step support. We find the target of the coming instruction
5323 and breakpoint it. */
5326 arm_software_single_step (struct frame_info
*frame
)
5328 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
5329 struct address_space
*aspace
= get_frame_address_space (frame
);
5332 if (arm_deal_with_atomic_sequence (frame
))
5335 next_pc
= arm_get_next_pc (frame
, get_frame_pc (frame
));
5336 arm_insert_single_step_breakpoint (gdbarch
, aspace
, next_pc
);
5341 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5342 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5343 NULL if an error occurs. BUF is freed. */
5346 extend_buffer_earlier (gdb_byte
*buf
, CORE_ADDR endaddr
,
5347 int old_len
, int new_len
)
5350 int bytes_to_read
= new_len
- old_len
;
5352 new_buf
= xmalloc (new_len
);
5353 memcpy (new_buf
+ bytes_to_read
, buf
, old_len
);
5355 if (target_read_memory (endaddr
- new_len
, new_buf
, bytes_to_read
) != 0)
5363 /* An IT block is at most the 2-byte IT instruction followed by
5364 four 4-byte instructions. The furthest back we must search to
5365 find an IT block that affects the current instruction is thus
5366 2 + 3 * 4 == 14 bytes. */
5367 #define MAX_IT_BLOCK_PREFIX 14
5369 /* Use a quick scan if there are more than this many bytes of
5371 #define IT_SCAN_THRESHOLD 32
5373 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5374 A breakpoint in an IT block may not be hit, depending on the
5377 arm_adjust_breakpoint_address (struct gdbarch
*gdbarch
, CORE_ADDR bpaddr
)
5381 CORE_ADDR boundary
, func_start
;
5383 enum bfd_endian order
= gdbarch_byte_order_for_code (gdbarch
);
5384 int i
, any
, last_it
, last_it_count
;
5386 /* If we are using BKPT breakpoints, none of this is necessary. */
5387 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
== NULL
)
5390 /* ARM mode does not have this problem. */
5391 if (!arm_pc_is_thumb (gdbarch
, bpaddr
))
5394 /* We are setting a breakpoint in Thumb code that could potentially
5395 contain an IT block. The first step is to find how much Thumb
5396 code there is; we do not need to read outside of known Thumb
5398 map_type
= arm_find_mapping_symbol (bpaddr
, &boundary
);
5400 /* Thumb-2 code must have mapping symbols to have a chance. */
5403 bpaddr
= gdbarch_addr_bits_remove (gdbarch
, bpaddr
);
5405 if (find_pc_partial_function (bpaddr
, NULL
, &func_start
, NULL
)
5406 && func_start
> boundary
)
5407 boundary
= func_start
;
5409 /* Search for a candidate IT instruction. We have to do some fancy
5410 footwork to distinguish a real IT instruction from the second
5411 half of a 32-bit instruction, but there is no need for that if
5412 there's no candidate. */
5413 buf_len
= min (bpaddr
- boundary
, MAX_IT_BLOCK_PREFIX
);
5415 /* No room for an IT instruction. */
5418 buf
= xmalloc (buf_len
);
5419 if (target_read_memory (bpaddr
- buf_len
, buf
, buf_len
) != 0)
5422 for (i
= 0; i
< buf_len
; i
+= 2)
5424 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5425 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
5437 /* OK, the code bytes before this instruction contain at least one
5438 halfword which resembles an IT instruction. We know that it's
5439 Thumb code, but there are still two possibilities. Either the
5440 halfword really is an IT instruction, or it is the second half of
5441 a 32-bit Thumb instruction. The only way we can tell is to
5442 scan forwards from a known instruction boundary. */
5443 if (bpaddr
- boundary
> IT_SCAN_THRESHOLD
)
5447 /* There's a lot of code before this instruction. Start with an
5448 optimistic search; it's easy to recognize halfwords that can
5449 not be the start of a 32-bit instruction, and use that to
5450 lock on to the instruction boundaries. */
5451 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, IT_SCAN_THRESHOLD
);
5454 buf_len
= IT_SCAN_THRESHOLD
;
5457 for (i
= 0; i
< buf_len
- sizeof (buf
) && ! definite
; i
+= 2)
5459 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5460 if (thumb_insn_size (inst1
) == 2)
5467 /* At this point, if DEFINITE, BUF[I] is the first place we
5468 are sure that we know the instruction boundaries, and it is far
5469 enough from BPADDR that we could not miss an IT instruction
5470 affecting BPADDR. If ! DEFINITE, give up - start from a
5474 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
,
5478 buf_len
= bpaddr
- boundary
;
5484 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, bpaddr
- boundary
);
5487 buf_len
= bpaddr
- boundary
;
5491 /* Scan forwards. Find the last IT instruction before BPADDR. */
5496 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5498 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
5503 else if (inst1
& 0x0002)
5505 else if (inst1
& 0x0004)
5510 i
+= thumb_insn_size (inst1
);
5516 /* There wasn't really an IT instruction after all. */
5519 if (last_it_count
< 1)
5520 /* It was too far away. */
5523 /* This really is a trouble spot. Move the breakpoint to the IT
5525 return bpaddr
- buf_len
+ last_it
;
5528 /* ARM displaced stepping support.
5530 Generally ARM displaced stepping works as follows:
5532 1. When an instruction is to be single-stepped, it is first decoded by
5533 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5534 Depending on the type of instruction, it is then copied to a scratch
5535 location, possibly in a modified form. The copy_* set of functions
5536 performs such modification, as necessary. A breakpoint is placed after
5537 the modified instruction in the scratch space to return control to GDB.
5538 Note in particular that instructions which modify the PC will no longer
5539 do so after modification.
5541 2. The instruction is single-stepped, by setting the PC to the scratch
5542 location address, and resuming. Control returns to GDB when the
5545 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5546 function used for the current instruction. This function's job is to
5547 put the CPU/memory state back to what it would have been if the
5548 instruction had been executed unmodified in its original location. */
5550 /* NOP instruction (mov r0, r0). */
5551 #define ARM_NOP 0xe1a00000
5552 #define THUMB_NOP 0x4600
5554 /* Helper for register reads for displaced stepping. In particular, this
5555 returns the PC as it would be seen by the instruction at its original
5559 displaced_read_reg (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5563 CORE_ADDR from
= dsc
->insn_addr
;
5565 if (regno
== ARM_PC_REGNUM
)
5567 /* Compute pipeline offset:
5568 - When executing an ARM instruction, PC reads as the address of the
5569 current instruction plus 8.
5570 - When executing a Thumb instruction, PC reads as the address of the
5571 current instruction plus 4. */
5578 if (debug_displaced
)
5579 fprintf_unfiltered (gdb_stdlog
, "displaced: read pc value %.8lx\n",
5580 (unsigned long) from
);
5581 return (ULONGEST
) from
;
5585 regcache_cooked_read_unsigned (regs
, regno
, &ret
);
5586 if (debug_displaced
)
5587 fprintf_unfiltered (gdb_stdlog
, "displaced: read r%d value %.8lx\n",
5588 regno
, (unsigned long) ret
);
5594 displaced_in_arm_mode (struct regcache
*regs
)
5597 ULONGEST t_bit
= arm_psr_thumb_bit (get_regcache_arch (regs
));
5599 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
5601 return (ps
& t_bit
) == 0;
5604 /* Write to the PC as from a branch instruction. */
5607 branch_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5611 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5612 architecture versions < 6. */
5613 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
5614 val
& ~(ULONGEST
) 0x3);
5616 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
5617 val
& ~(ULONGEST
) 0x1);
5620 /* Write to the PC as from a branch-exchange instruction. */
5623 bx_write_pc (struct regcache
*regs
, ULONGEST val
)
5626 ULONGEST t_bit
= arm_psr_thumb_bit (get_regcache_arch (regs
));
5628 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
5632 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
| t_bit
);
5633 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffe);
5635 else if ((val
& 2) == 0)
5637 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
5638 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
);
5642 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5643 mode, align dest to 4 bytes). */
5644 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5645 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
5646 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffc);
5650 /* Write to the PC as if from a load instruction. */
5653 load_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5656 if (DISPLACED_STEPPING_ARCH_VERSION
>= 5)
5657 bx_write_pc (regs
, val
);
5659 branch_write_pc (regs
, dsc
, val
);
5662 /* Write to the PC as if from an ALU instruction. */
5665 alu_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5668 if (DISPLACED_STEPPING_ARCH_VERSION
>= 7 && !dsc
->is_thumb
)
5669 bx_write_pc (regs
, val
);
5671 branch_write_pc (regs
, dsc
, val
);
5674 /* Helper for writing to registers for displaced stepping. Writing to the PC
5675 has a varying effects depending on the instruction which does the write:
5676 this is controlled by the WRITE_PC argument. */
5679 displaced_write_reg (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5680 int regno
, ULONGEST val
, enum pc_write_style write_pc
)
5682 if (regno
== ARM_PC_REGNUM
)
5684 if (debug_displaced
)
5685 fprintf_unfiltered (gdb_stdlog
, "displaced: writing pc %.8lx\n",
5686 (unsigned long) val
);
5689 case BRANCH_WRITE_PC
:
5690 branch_write_pc (regs
, dsc
, val
);
5694 bx_write_pc (regs
, val
);
5698 load_write_pc (regs
, dsc
, val
);
5702 alu_write_pc (regs
, dsc
, val
);
5705 case CANNOT_WRITE_PC
:
5706 warning (_("Instruction wrote to PC in an unexpected way when "
5707 "single-stepping"));
5711 internal_error (__FILE__
, __LINE__
,
5712 _("Invalid argument to displaced_write_reg"));
5715 dsc
->wrote_to_pc
= 1;
5719 if (debug_displaced
)
5720 fprintf_unfiltered (gdb_stdlog
, "displaced: writing r%d value %.8lx\n",
5721 regno
, (unsigned long) val
);
5722 regcache_cooked_write_unsigned (regs
, regno
, val
);
5726 /* This function is used to concisely determine if an instruction INSN
5727 references PC. Register fields of interest in INSN should have the
5728 corresponding fields of BITMASK set to 0b1111. The function
5729 returns return 1 if any of these fields in INSN reference the PC
5730 (also 0b1111, r15), else it returns 0. */
static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t lowbit = 1;

  while (bitmask != 0)
    {
      uint32_t mask;

      /* Find the lowest nibble selected by BITMASK.  */
      for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
	;

      if (!lowbit)
	return 0;  /* Must have run out of bits.  */

      mask = lowbit * 0xf;

      /* A register field equal to 0b1111 denotes the PC (r15).  */
      if ((insn & mask) == mask)
	return 1;

      bitmask &= ~mask;
    }

  return 0;
}
5758 /* The simplest copy function. Many instructions have the same effect no
5759 matter what address they are executed at: in those cases, use this. */
5762 arm_copy_unmodified (struct gdbarch
*gdbarch
, uint32_t insn
,
5763 const char *iname
, struct displaced_step_closure
*dsc
)
5765 if (debug_displaced
)
5766 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx, "
5767 "opcode/class '%s' unmodified\n", (unsigned long) insn
,
5770 dsc
->modinsn
[0] = insn
;
5776 thumb_copy_unmodified_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
5777 uint16_t insn2
, const char *iname
,
5778 struct displaced_step_closure
*dsc
)
5780 if (debug_displaced
)
5781 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x %.4x, "
5782 "opcode/class '%s' unmodified\n", insn1
, insn2
,
5785 dsc
->modinsn
[0] = insn1
;
5786 dsc
->modinsn
[1] = insn2
;
5792 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5795 thumb_copy_unmodified_16bit (struct gdbarch
*gdbarch
, unsigned int insn
,
5797 struct displaced_step_closure
*dsc
)
5799 if (debug_displaced
)
5800 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x, "
5801 "opcode/class '%s' unmodified\n", insn
,
5804 dsc
->modinsn
[0] = insn
;
5809 /* Preload instructions with immediate offset. */
5812 cleanup_preload (struct gdbarch
*gdbarch
,
5813 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5815 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5816 if (!dsc
->u
.preload
.immed
)
5817 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5821 install_preload (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5822 struct displaced_step_closure
*dsc
, unsigned int rn
)
5825 /* Preload instructions:
5827 {pli/pld} [rn, #+/-imm]
5829 {pli/pld} [r0, #+/-imm]. */
5831 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5832 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5833 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5834 dsc
->u
.preload
.immed
= 1;
5836 dsc
->cleanup
= &cleanup_preload
;
5840 arm_copy_preload (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5841 struct displaced_step_closure
*dsc
)
5843 unsigned int rn
= bits (insn
, 16, 19);
5845 if (!insn_references_pc (insn
, 0x000f0000ul
))
5846 return arm_copy_unmodified (gdbarch
, insn
, "preload", dsc
);
5848 if (debug_displaced
)
5849 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
5850 (unsigned long) insn
);
5852 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
5854 install_preload (gdbarch
, regs
, dsc
, rn
);
5860 thumb2_copy_preload (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
5861 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5863 unsigned int rn
= bits (insn1
, 0, 3);
5864 unsigned int u_bit
= bit (insn1
, 7);
5865 int imm12
= bits (insn2
, 0, 11);
5868 if (rn
!= ARM_PC_REGNUM
)
5869 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "preload", dsc
);
5871 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5872 PLD (literal) Encoding T1. */
5873 if (debug_displaced
)
5874 fprintf_unfiltered (gdb_stdlog
,
5875 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5876 (unsigned int) dsc
->insn_addr
, u_bit
? '+' : '-',
5882 /* Rewrite instruction {pli/pld} PC imm12 into:
5883 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5887 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5889 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5890 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5892 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
5894 displaced_write_reg (regs
, dsc
, 0, pc_val
, CANNOT_WRITE_PC
);
5895 displaced_write_reg (regs
, dsc
, 1, imm12
, CANNOT_WRITE_PC
);
5896 dsc
->u
.preload
.immed
= 0;
5898 /* {pli/pld} [r0, r1] */
5899 dsc
->modinsn
[0] = insn1
& 0xfff0;
5900 dsc
->modinsn
[1] = 0xf001;
5903 dsc
->cleanup
= &cleanup_preload
;
5907 /* Preload instructions with register offset. */
5910 install_preload_reg(struct gdbarch
*gdbarch
, struct regcache
*regs
,
5911 struct displaced_step_closure
*dsc
, unsigned int rn
,
5914 ULONGEST rn_val
, rm_val
;
5916 /* Preload register-offset instructions:
5918 {pli/pld} [rn, rm {, shift}]
5920 {pli/pld} [r0, r1 {, shift}]. */
5922 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5923 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5924 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5925 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5926 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5927 displaced_write_reg (regs
, dsc
, 1, rm_val
, CANNOT_WRITE_PC
);
5928 dsc
->u
.preload
.immed
= 0;
5930 dsc
->cleanup
= &cleanup_preload
;
5934 arm_copy_preload_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5935 struct regcache
*regs
,
5936 struct displaced_step_closure
*dsc
)
5938 unsigned int rn
= bits (insn
, 16, 19);
5939 unsigned int rm
= bits (insn
, 0, 3);
5942 if (!insn_references_pc (insn
, 0x000f000ful
))
5943 return arm_copy_unmodified (gdbarch
, insn
, "preload reg", dsc
);
5945 if (debug_displaced
)
5946 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
5947 (unsigned long) insn
);
5949 dsc
->modinsn
[0] = (insn
& 0xfff0fff0) | 0x1;
5951 install_preload_reg (gdbarch
, regs
, dsc
, rn
, rm
);
5955 /* Copy/cleanup coprocessor load and store instructions. */
5958 cleanup_copro_load_store (struct gdbarch
*gdbarch
,
5959 struct regcache
*regs
,
5960 struct displaced_step_closure
*dsc
)
5962 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 0);
5964 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5966 if (dsc
->u
.ldst
.writeback
)
5967 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, LOAD_WRITE_PC
);
5971 install_copro_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5972 struct displaced_step_closure
*dsc
,
5973 int writeback
, unsigned int rn
)
5977 /* Coprocessor load/store instructions:
5979 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5981 {stc/stc2} [r0, #+/-imm].
5983 ldc/ldc2 are handled identically. */
5985 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5986 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5987 /* PC should be 4-byte aligned. */
5988 rn_val
= rn_val
& 0xfffffffc;
5989 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5991 dsc
->u
.ldst
.writeback
= writeback
;
5992 dsc
->u
.ldst
.rn
= rn
;
5994 dsc
->cleanup
= &cleanup_copro_load_store
;
5998 arm_copy_copro_load_store (struct gdbarch
*gdbarch
, uint32_t insn
,
5999 struct regcache
*regs
,
6000 struct displaced_step_closure
*dsc
)
6002 unsigned int rn
= bits (insn
, 16, 19);
6004 if (!insn_references_pc (insn
, 0x000f0000ul
))
6005 return arm_copy_unmodified (gdbarch
, insn
, "copro load/store", dsc
);
6007 if (debug_displaced
)
6008 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
6009 "load/store insn %.8lx\n", (unsigned long) insn
);
6011 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
6013 install_copro_load_store (gdbarch
, regs
, dsc
, bit (insn
, 25), rn
);
6019 thumb2_copy_copro_load_store (struct gdbarch
*gdbarch
, uint16_t insn1
,
6020 uint16_t insn2
, struct regcache
*regs
,
6021 struct displaced_step_closure
*dsc
)
6023 unsigned int rn
= bits (insn1
, 0, 3);
6025 if (rn
!= ARM_PC_REGNUM
)
6026 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6027 "copro load/store", dsc
);
6029 if (debug_displaced
)
6030 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
6031 "load/store insn %.4x%.4x\n", insn1
, insn2
);
6033 dsc
->modinsn
[0] = insn1
& 0xfff0;
6034 dsc
->modinsn
[1] = insn2
;
6037 /* This function is called for copying instruction LDC/LDC2/VLDR, which
6038 doesn't support writeback, so pass 0. */
6039 install_copro_load_store (gdbarch
, regs
, dsc
, 0, rn
);
6044 /* Clean up branch instructions (actually perform the branch, by setting
6048 cleanup_branch (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6049 struct displaced_step_closure
*dsc
)
6051 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
6052 int branch_taken
= condition_true (dsc
->u
.branch
.cond
, status
);
6053 enum pc_write_style write_pc
= dsc
->u
.branch
.exchange
6054 ? BX_WRITE_PC
: BRANCH_WRITE_PC
;
6059 if (dsc
->u
.branch
.link
)
6061 /* The value of LR should be the next insn of current one. In order
6062 not to confuse logic hanlding later insn `bx lr', if current insn mode
6063 is Thumb, the bit 0 of LR value should be set to 1. */
6064 ULONGEST next_insn_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
6067 next_insn_addr
|= 0x1;
6069 displaced_write_reg (regs
, dsc
, ARM_LR_REGNUM
, next_insn_addr
,
6073 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, dsc
->u
.branch
.dest
, write_pc
);
6076 /* Copy B/BL/BLX instructions with immediate destinations. */
6079 install_b_bl_blx (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6080 struct displaced_step_closure
*dsc
,
6081 unsigned int cond
, int exchange
, int link
, long offset
)
6083 /* Implement "BL<cond> <label>" as:
6085 Preparation: cond <- instruction condition
6086 Insn: mov r0, r0 (nop)
6087 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6089 B<cond> similar, but don't set r14 in cleanup. */
6091 dsc
->u
.branch
.cond
= cond
;
6092 dsc
->u
.branch
.link
= link
;
6093 dsc
->u
.branch
.exchange
= exchange
;
6095 dsc
->u
.branch
.dest
= dsc
->insn_addr
;
6096 if (link
&& exchange
)
6097 /* For BLX, offset is computed from the Align (PC, 4). */
6098 dsc
->u
.branch
.dest
= dsc
->u
.branch
.dest
& 0xfffffffc;
6101 dsc
->u
.branch
.dest
+= 4 + offset
;
6103 dsc
->u
.branch
.dest
+= 8 + offset
;
6105 dsc
->cleanup
= &cleanup_branch
;
6108 arm_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint32_t insn
,
6109 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6111 unsigned int cond
= bits (insn
, 28, 31);
6112 int exchange
= (cond
== 0xf);
6113 int link
= exchange
|| bit (insn
, 24);
6116 if (debug_displaced
)
6117 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s immediate insn "
6118 "%.8lx\n", (exchange
) ? "blx" : (link
) ? "bl" : "b",
6119 (unsigned long) insn
);
6121 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6122 then arrange the switch into Thumb mode. */
6123 offset
= (bits (insn
, 0, 23) << 2) | (bit (insn
, 24) << 1) | 1;
6125 offset
= bits (insn
, 0, 23) << 2;
6127 if (bit (offset
, 25))
6128 offset
= offset
| ~0x3ffffff;
6130 dsc
->modinsn
[0] = ARM_NOP
;
6132 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
6137 thumb2_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint16_t insn1
,
6138 uint16_t insn2
, struct regcache
*regs
,
6139 struct displaced_step_closure
*dsc
)
6141 int link
= bit (insn2
, 14);
6142 int exchange
= link
&& !bit (insn2
, 12);
6145 int j1
= bit (insn2
, 13);
6146 int j2
= bit (insn2
, 11);
6147 int s
= sbits (insn1
, 10, 10);
6148 int i1
= !(j1
^ bit (insn1
, 10));
6149 int i2
= !(j2
^ bit (insn1
, 10));
6151 if (!link
&& !exchange
) /* B */
6153 offset
= (bits (insn2
, 0, 10) << 1);
6154 if (bit (insn2
, 12)) /* Encoding T4 */
6156 offset
|= (bits (insn1
, 0, 9) << 12)
6162 else /* Encoding T3 */
6164 offset
|= (bits (insn1
, 0, 5) << 12)
6168 cond
= bits (insn1
, 6, 9);
6173 offset
= (bits (insn1
, 0, 9) << 12);
6174 offset
|= ((i2
<< 22) | (i1
<< 23) | (s
<< 24));
6175 offset
|= exchange
?
6176 (bits (insn2
, 1, 10) << 2) : (bits (insn2
, 0, 10) << 1);
6179 if (debug_displaced
)
6180 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s insn "
6181 "%.4x %.4x with offset %.8lx\n",
6182 link
? (exchange
) ? "blx" : "bl" : "b",
6183 insn1
, insn2
, offset
);
6185 dsc
->modinsn
[0] = THUMB_NOP
;
6187 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
6191 /* Copy B Thumb instructions. */
6193 thumb_copy_b (struct gdbarch
*gdbarch
, unsigned short insn
,
6194 struct displaced_step_closure
*dsc
)
6196 unsigned int cond
= 0;
6198 unsigned short bit_12_15
= bits (insn
, 12, 15);
6199 CORE_ADDR from
= dsc
->insn_addr
;
6201 if (bit_12_15
== 0xd)
6203 /* offset = SignExtend (imm8:0, 32) */
6204 offset
= sbits ((insn
<< 1), 0, 8);
6205 cond
= bits (insn
, 8, 11);
6207 else if (bit_12_15
== 0xe) /* Encoding T2 */
6209 offset
= sbits ((insn
<< 1), 0, 11);
6213 if (debug_displaced
)
6214 fprintf_unfiltered (gdb_stdlog
,
6215 "displaced: copying b immediate insn %.4x "
6216 "with offset %d\n", insn
, offset
);
6218 dsc
->u
.branch
.cond
= cond
;
6219 dsc
->u
.branch
.link
= 0;
6220 dsc
->u
.branch
.exchange
= 0;
6221 dsc
->u
.branch
.dest
= from
+ 4 + offset
;
6223 dsc
->modinsn
[0] = THUMB_NOP
;
6225 dsc
->cleanup
= &cleanup_branch
;
6230 /* Copy BX/BLX with register-specified destinations. */
6233 install_bx_blx_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6234 struct displaced_step_closure
*dsc
, int link
,
6235 unsigned int cond
, unsigned int rm
)
6237 /* Implement {BX,BLX}<cond> <reg>" as:
6239 Preparation: cond <- instruction condition
6240 Insn: mov r0, r0 (nop)
6241 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6243 Don't set r14 in cleanup for BX. */
6245 dsc
->u
.branch
.dest
= displaced_read_reg (regs
, dsc
, rm
);
6247 dsc
->u
.branch
.cond
= cond
;
6248 dsc
->u
.branch
.link
= link
;
6250 dsc
->u
.branch
.exchange
= 1;
6252 dsc
->cleanup
= &cleanup_branch
;
6256 arm_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
6257 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6259 unsigned int cond
= bits (insn
, 28, 31);
6262 int link
= bit (insn
, 5);
6263 unsigned int rm
= bits (insn
, 0, 3);
6265 if (debug_displaced
)
6266 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx",
6267 (unsigned long) insn
);
6269 dsc
->modinsn
[0] = ARM_NOP
;
6271 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, cond
, rm
);
6276 thumb_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
6277 struct regcache
*regs
,
6278 struct displaced_step_closure
*dsc
)
6280 int link
= bit (insn
, 7);
6281 unsigned int rm
= bits (insn
, 3, 6);
6283 if (debug_displaced
)
6284 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x",
6285 (unsigned short) insn
);
6287 dsc
->modinsn
[0] = THUMB_NOP
;
6289 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, INST_AL
, rm
);
6295 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6298 cleanup_alu_imm (struct gdbarch
*gdbarch
,
6299 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6301 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
6302 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
6303 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
6304 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
6308 arm_copy_alu_imm (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
6309 struct displaced_step_closure
*dsc
)
6311 unsigned int rn
= bits (insn
, 16, 19);
6312 unsigned int rd
= bits (insn
, 12, 15);
6313 unsigned int op
= bits (insn
, 21, 24);
6314 int is_mov
= (op
== 0xd);
6315 ULONGEST rd_val
, rn_val
;
6317 if (!insn_references_pc (insn
, 0x000ff000ul
))
6318 return arm_copy_unmodified (gdbarch
, insn
, "ALU immediate", dsc
);
6320 if (debug_displaced
)
6321 fprintf_unfiltered (gdb_stdlog
, "displaced: copying immediate %s insn "
6322 "%.8lx\n", is_mov
? "move" : "ALU",
6323 (unsigned long) insn
);
6325 /* Instruction is of form:
6327 <op><cond> rd, [rn,] #imm
6331 Preparation: tmp1, tmp2 <- r0, r1;
6333 Insn: <op><cond> r0, r1, #imm
6334 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6337 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6338 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6339 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6340 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6341 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6342 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6346 dsc
->modinsn
[0] = insn
& 0xfff00fff;
6348 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x10000;
6350 dsc
->cleanup
= &cleanup_alu_imm
;
6356 thumb2_copy_alu_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
6357 uint16_t insn2
, struct regcache
*regs
,
6358 struct displaced_step_closure
*dsc
)
6360 unsigned int op
= bits (insn1
, 5, 8);
6361 unsigned int rn
, rm
, rd
;
6362 ULONGEST rd_val
, rn_val
;
6364 rn
= bits (insn1
, 0, 3); /* Rn */
6365 rm
= bits (insn2
, 0, 3); /* Rm */
6366 rd
= bits (insn2
, 8, 11); /* Rd */
6368 /* This routine is only called for instruction MOV. */
6369 gdb_assert (op
== 0x2 && rn
== 0xf);
6371 if (rm
!= ARM_PC_REGNUM
&& rd
!= ARM_PC_REGNUM
)
6372 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ALU imm", dsc
);
6374 if (debug_displaced
)
6375 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.4x%.4x\n",
6376 "ALU", insn1
, insn2
);
6378 /* Instruction is of form:
6380 <op><cond> rd, [rn,] #imm
6384 Preparation: tmp1, tmp2 <- r0, r1;
6386 Insn: <op><cond> r0, r1, #imm
6387 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6390 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6391 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6392 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6393 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6394 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6395 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6398 dsc
->modinsn
[0] = insn1
;
6399 dsc
->modinsn
[1] = ((insn2
& 0xf0f0) | 0x1);
6402 dsc
->cleanup
= &cleanup_alu_imm
;
6407 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6410 cleanup_alu_reg (struct gdbarch
*gdbarch
,
6411 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6416 rd_val
= displaced_read_reg (regs
, dsc
, 0);
6418 for (i
= 0; i
< 3; i
++)
6419 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
6421 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
6425 install_alu_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6426 struct displaced_step_closure
*dsc
,
6427 unsigned int rd
, unsigned int rn
, unsigned int rm
)
6429 ULONGEST rd_val
, rn_val
, rm_val
;
6431 /* Instruction is of form:
6433 <op><cond> rd, [rn,] rm [, <shift>]
6437 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6438 r0, r1, r2 <- rd, rn, rm
6439 Insn: <op><cond> r0, r1, r2 [, <shift>]
6440 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6443 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6444 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6445 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6446 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6447 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6448 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6449 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6450 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6451 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
6454 dsc
->cleanup
= &cleanup_alu_reg
;
6458 arm_copy_alu_reg (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
6459 struct displaced_step_closure
*dsc
)
6461 unsigned int op
= bits (insn
, 21, 24);
6462 int is_mov
= (op
== 0xd);
6464 if (!insn_references_pc (insn
, 0x000ff00ful
))
6465 return arm_copy_unmodified (gdbarch
, insn
, "ALU reg", dsc
);
6467 if (debug_displaced
)
6468 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.8lx\n",
6469 is_mov
? "move" : "ALU", (unsigned long) insn
);
6472 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x2;
6474 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x10002;
6476 install_alu_reg (gdbarch
, regs
, dsc
, bits (insn
, 12, 15), bits (insn
, 16, 19),
6482 thumb_copy_alu_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
6483 struct regcache
*regs
,
6484 struct displaced_step_closure
*dsc
)
6486 unsigned rn
, rm
, rd
;
6488 rd
= bits (insn
, 3, 6);
6489 rn
= (bit (insn
, 7) << 3) | bits (insn
, 0, 2);
6492 if (rd
!= ARM_PC_REGNUM
&& rn
!= ARM_PC_REGNUM
)
6493 return thumb_copy_unmodified_16bit (gdbarch
, insn
, "ALU reg", dsc
);
6495 if (debug_displaced
)
6496 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.4x\n",
6497 "ALU", (unsigned short) insn
);
6499 dsc
->modinsn
[0] = ((insn
& 0xff00) | 0x08);
6501 install_alu_reg (gdbarch
, regs
, dsc
, rd
, rn
, rm
);
6506 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6509 cleanup_alu_shifted_reg (struct gdbarch
*gdbarch
,
6510 struct regcache
*regs
,
6511 struct displaced_step_closure
*dsc
)
6513 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
6516 for (i
= 0; i
< 4; i
++)
6517 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
6519 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
6523 install_alu_shifted_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6524 struct displaced_step_closure
*dsc
,
6525 unsigned int rd
, unsigned int rn
, unsigned int rm
,
6529 ULONGEST rd_val
, rn_val
, rm_val
, rs_val
;
6531 /* Instruction is of form:
6533 <op><cond> rd, [rn,] rm, <shift> rs
6537 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6538 r0, r1, r2, r3 <- rd, rn, rm, rs
6539 Insn: <op><cond> r0, r1, r2, <shift> r3
6541 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6545 for (i
= 0; i
< 4; i
++)
6546 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
6548 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6549 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6550 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6551 rs_val
= displaced_read_reg (regs
, dsc
, rs
);
6552 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6553 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6554 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
6555 displaced_write_reg (regs
, dsc
, 3, rs_val
, CANNOT_WRITE_PC
);
6557 dsc
->cleanup
= &cleanup_alu_shifted_reg
;
6561 arm_copy_alu_shifted_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
6562 struct regcache
*regs
,
6563 struct displaced_step_closure
*dsc
)
6565 unsigned int op
= bits (insn
, 21, 24);
6566 int is_mov
= (op
== 0xd);
6567 unsigned int rd
, rn
, rm
, rs
;
6569 if (!insn_references_pc (insn
, 0x000fff0ful
))
6570 return arm_copy_unmodified (gdbarch
, insn
, "ALU shifted reg", dsc
);
6572 if (debug_displaced
)
6573 fprintf_unfiltered (gdb_stdlog
, "displaced: copying shifted reg %s insn "
6574 "%.8lx\n", is_mov
? "move" : "ALU",
6575 (unsigned long) insn
);
6577 rn
= bits (insn
, 16, 19);
6578 rm
= bits (insn
, 0, 3);
6579 rs
= bits (insn
, 8, 11);
6580 rd
= bits (insn
, 12, 15);
6583 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x302;
6585 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x10302;
6587 install_alu_shifted_reg (gdbarch
, regs
, dsc
, rd
, rn
, rm
, rs
);
6592 /* Clean up load instructions. */
6595 cleanup_load (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6596 struct displaced_step_closure
*dsc
)
6598 ULONGEST rt_val
, rt_val2
= 0, rn_val
;
6600 rt_val
= displaced_read_reg (regs
, dsc
, 0);
6601 if (dsc
->u
.ldst
.xfersize
== 8)
6602 rt_val2
= displaced_read_reg (regs
, dsc
, 1);
6603 rn_val
= displaced_read_reg (regs
, dsc
, 2);
6605 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
6606 if (dsc
->u
.ldst
.xfersize
> 4)
6607 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
6608 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
6609 if (!dsc
->u
.ldst
.immed
)
6610 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
6612 /* Handle register writeback. */
6613 if (dsc
->u
.ldst
.writeback
)
6614 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
6615 /* Put result in right place. */
6616 displaced_write_reg (regs
, dsc
, dsc
->rd
, rt_val
, LOAD_WRITE_PC
);
6617 if (dsc
->u
.ldst
.xfersize
== 8)
6618 displaced_write_reg (regs
, dsc
, dsc
->rd
+ 1, rt_val2
, LOAD_WRITE_PC
);
6621 /* Clean up store instructions. */
6624 cleanup_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6625 struct displaced_step_closure
*dsc
)
6627 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 2);
6629 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
6630 if (dsc
->u
.ldst
.xfersize
> 4)
6631 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
6632 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
6633 if (!dsc
->u
.ldst
.immed
)
6634 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
6635 if (!dsc
->u
.ldst
.restore_r4
)
6636 displaced_write_reg (regs
, dsc
, 4, dsc
->tmp
[4], CANNOT_WRITE_PC
);
6639 if (dsc
->u
.ldst
.writeback
)
6640 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
6643 /* Copy "extra" load/store instructions. These are halfword/doubleword
6644 transfers, which have a different encoding to byte/word transfers. */
6647 arm_copy_extra_ld_st (struct gdbarch
*gdbarch
, uint32_t insn
, int unpriveleged
,
6648 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6650 unsigned int op1
= bits (insn
, 20, 24);
6651 unsigned int op2
= bits (insn
, 5, 6);
6652 unsigned int rt
= bits (insn
, 12, 15);
6653 unsigned int rn
= bits (insn
, 16, 19);
6654 unsigned int rm
= bits (insn
, 0, 3);
6655 char load
[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6656 char bytesize
[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6657 int immed
= (op1
& 0x4) != 0;
6659 ULONGEST rt_val
, rt_val2
= 0, rn_val
, rm_val
= 0;
6661 if (!insn_references_pc (insn
, 0x000ff00ful
))
6662 return arm_copy_unmodified (gdbarch
, insn
, "extra load/store", dsc
);
6664 if (debug_displaced
)
6665 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %sextra load/store "
6666 "insn %.8lx\n", unpriveleged
? "unpriveleged " : "",
6667 (unsigned long) insn
);
6669 opcode
= ((op2
<< 2) | (op1
& 0x1) | ((op1
& 0x4) >> 1)) - 4;
6672 internal_error (__FILE__
, __LINE__
,
6673 _("copy_extra_ld_st: instruction decode error"));
6675 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6676 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6677 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6679 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6681 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
6682 if (bytesize
[opcode
] == 8)
6683 rt_val2
= displaced_read_reg (regs
, dsc
, rt
+ 1);
6684 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6686 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6688 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
6689 if (bytesize
[opcode
] == 8)
6690 displaced_write_reg (regs
, dsc
, 1, rt_val2
, CANNOT_WRITE_PC
);
6691 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
6693 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
6696 dsc
->u
.ldst
.xfersize
= bytesize
[opcode
];
6697 dsc
->u
.ldst
.rn
= rn
;
6698 dsc
->u
.ldst
.immed
= immed
;
6699 dsc
->u
.ldst
.writeback
= bit (insn
, 24) == 0 || bit (insn
, 21) != 0;
6700 dsc
->u
.ldst
.restore_r4
= 0;
6703 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6705 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6706 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
6708 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6710 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6711 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
6713 dsc
->cleanup
= load
[opcode
] ? &cleanup_load
: &cleanup_store
;
6718 /* Copy byte/half word/word loads and stores. */
6721 install_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6722 struct displaced_step_closure
*dsc
, int load
,
6723 int immed
, int writeback
, int size
, int usermode
,
6724 int rt
, int rm
, int rn
)
6726 ULONGEST rt_val
, rn_val
, rm_val
= 0;
6728 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6729 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6731 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6733 dsc
->tmp
[4] = displaced_read_reg (regs
, dsc
, 4);
6735 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
6736 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6738 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6740 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
6741 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
6743 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
6745 dsc
->u
.ldst
.xfersize
= size
;
6746 dsc
->u
.ldst
.rn
= rn
;
6747 dsc
->u
.ldst
.immed
= immed
;
6748 dsc
->u
.ldst
.writeback
= writeback
;
6750 /* To write PC we can do:
6752 Before this sequence of instructions:
6753 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6754 r2 is the Rn value got from dispalced_read_reg.
6756 Insn1: push {pc} Write address of STR instruction + offset on stack
6757 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6758 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6759 = addr(Insn1) + offset - addr(Insn3) - 8
6761 Insn4: add r4, r4, #8 r4 = offset - 8
6762 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6764 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6766 Otherwise we don't know what value to write for PC, since the offset is
6767 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6768 of this can be found in Section "Saving from r15" in
6769 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6771 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
6776 thumb2_copy_load_literal (struct gdbarch
*gdbarch
, uint16_t insn1
,
6777 uint16_t insn2
, struct regcache
*regs
,
6778 struct displaced_step_closure
*dsc
, int size
)
6780 unsigned int u_bit
= bit (insn1
, 7);
6781 unsigned int rt
= bits (insn2
, 12, 15);
6782 int imm12
= bits (insn2
, 0, 11);
6785 if (debug_displaced
)
6786 fprintf_unfiltered (gdb_stdlog
,
6787 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6788 (unsigned int) dsc
->insn_addr
, rt
, u_bit
? '+' : '-',
6794 /* Rewrite instruction LDR Rt imm12 into:
6796 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6800 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6803 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6804 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6805 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6807 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
6809 pc_val
= pc_val
& 0xfffffffc;
6811 displaced_write_reg (regs
, dsc
, 2, pc_val
, CANNOT_WRITE_PC
);
6812 displaced_write_reg (regs
, dsc
, 3, imm12
, CANNOT_WRITE_PC
);
6816 dsc
->u
.ldst
.xfersize
= size
;
6817 dsc
->u
.ldst
.immed
= 0;
6818 dsc
->u
.ldst
.writeback
= 0;
6819 dsc
->u
.ldst
.restore_r4
= 0;
6821 /* LDR R0, R2, R3 */
6822 dsc
->modinsn
[0] = 0xf852;
6823 dsc
->modinsn
[1] = 0x3;
6826 dsc
->cleanup
= &cleanup_load
;
6832 thumb2_copy_load_reg_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
6833 uint16_t insn2
, struct regcache
*regs
,
6834 struct displaced_step_closure
*dsc
,
6835 int writeback
, int immed
)
6837 unsigned int rt
= bits (insn2
, 12, 15);
6838 unsigned int rn
= bits (insn1
, 0, 3);
6839 unsigned int rm
= bits (insn2
, 0, 3); /* Only valid if !immed. */
6840 /* In LDR (register), there is also a register Rm, which is not allowed to
6841 be PC, so we don't have to check it. */
6843 if (rt
!= ARM_PC_REGNUM
&& rn
!= ARM_PC_REGNUM
)
6844 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "load",
6847 if (debug_displaced
)
6848 fprintf_unfiltered (gdb_stdlog
,
6849 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6850 rt
, rn
, insn1
, insn2
);
6852 install_load_store (gdbarch
, regs
, dsc
, 1, immed
, writeback
, 4,
6855 dsc
->u
.ldst
.restore_r4
= 0;
6858 /* ldr[b]<cond> rt, [rn, #imm], etc.
6860 ldr[b]<cond> r0, [r2, #imm]. */
6862 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
6863 dsc
->modinsn
[1] = insn2
& 0x0fff;
6866 /* ldr[b]<cond> rt, [rn, rm], etc.
6868 ldr[b]<cond> r0, [r2, r3]. */
6870 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
6871 dsc
->modinsn
[1] = (insn2
& 0x0ff0) | 0x3;
6881 arm_copy_ldr_str_ldrb_strb (struct gdbarch
*gdbarch
, uint32_t insn
,
6882 struct regcache
*regs
,
6883 struct displaced_step_closure
*dsc
,
6884 int load
, int size
, int usermode
)
6886 int immed
= !bit (insn
, 25);
6887 int writeback
= (bit (insn
, 24) == 0 || bit (insn
, 21) != 0);
6888 unsigned int rt
= bits (insn
, 12, 15);
6889 unsigned int rn
= bits (insn
, 16, 19);
6890 unsigned int rm
= bits (insn
, 0, 3); /* Only valid if !immed. */
6892 if (!insn_references_pc (insn
, 0x000ff00ful
))
6893 return arm_copy_unmodified (gdbarch
, insn
, "load/store", dsc
);
6895 if (debug_displaced
)
6896 fprintf_unfiltered (gdb_stdlog
,
6897 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6898 load
? (size
== 1 ? "ldrb" : "ldr")
6899 : (size
== 1 ? "strb" : "str"), usermode
? "t" : "",
6901 (unsigned long) insn
);
6903 install_load_store (gdbarch
, regs
, dsc
, load
, immed
, writeback
, size
,
6904 usermode
, rt
, rm
, rn
);
6906 if (load
|| rt
!= ARM_PC_REGNUM
)
6908 dsc
->u
.ldst
.restore_r4
= 0;
6911 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6913 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6914 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
6916 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6918 {ldr,str}[b]<cond> r0, [r2, r3]. */
6919 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
6923 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6924 dsc
->u
.ldst
.restore_r4
= 1;
6925 dsc
->modinsn
[0] = 0xe92d8000; /* push {pc} */
6926 dsc
->modinsn
[1] = 0xe8bd0010; /* pop {r4} */
6927 dsc
->modinsn
[2] = 0xe044400f; /* sub r4, r4, pc. */
6928 dsc
->modinsn
[3] = 0xe2844008; /* add r4, r4, #8. */
6929 dsc
->modinsn
[4] = 0xe0800004; /* add r0, r0, r4. */
6933 dsc
->modinsn
[5] = (insn
& 0xfff00fff) | 0x20000;
6935 dsc
->modinsn
[5] = (insn
& 0xfff00ff0) | 0x20003;
6940 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
6945 /* Cleanup LDM instructions with fully-populated register list. This is an
6946 unfortunate corner case: it's impossible to implement correctly by modifying
6947 the instruction. The issue is as follows: we have an instruction,
6951 which we must rewrite to avoid loading PC. A possible solution would be to
6952 do the load in two halves, something like (with suitable cleanup
6956 ldm[id][ab] r8!, {r0-r7}
6958 ldm[id][ab] r8, {r7-r14}
6961 but at present there's no suitable place for <temp>, since the scratch space
6962 is overwritten before the cleanup routine is called. For now, we simply
6963 emulate the instruction. */
6966 cleanup_block_load_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6967 struct displaced_step_closure
*dsc
)
6969 int inc
= dsc
->u
.block
.increment
;
6970 int bump_before
= dsc
->u
.block
.before
? (inc
? 4 : -4) : 0;
6971 int bump_after
= dsc
->u
.block
.before
? 0 : (inc
? 4 : -4);
6972 uint32_t regmask
= dsc
->u
.block
.regmask
;
6973 int regno
= inc
? 0 : 15;
6974 CORE_ADDR xfer_addr
= dsc
->u
.block
.xfer_addr
;
6975 int exception_return
= dsc
->u
.block
.load
&& dsc
->u
.block
.user
6976 && (regmask
& 0x8000) != 0;
6977 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
6978 int do_transfer
= condition_true (dsc
->u
.block
.cond
, status
);
6979 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
6984 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6985 sensible we can do here. Complain loudly. */
6986 if (exception_return
)
6987 error (_("Cannot single-step exception return"));
6989 /* We don't handle any stores here for now. */
6990 gdb_assert (dsc
->u
.block
.load
!= 0);
6992 if (debug_displaced
)
6993 fprintf_unfiltered (gdb_stdlog
, "displaced: emulating block transfer: "
6994 "%s %s %s\n", dsc
->u
.block
.load
? "ldm" : "stm",
6995 dsc
->u
.block
.increment
? "inc" : "dec",
6996 dsc
->u
.block
.before
? "before" : "after");
7003 while (regno
<= ARM_PC_REGNUM
&& (regmask
& (1 << regno
)) == 0)
7006 while (regno
>= 0 && (regmask
& (1 << regno
)) == 0)
7009 xfer_addr
+= bump_before
;
7011 memword
= read_memory_unsigned_integer (xfer_addr
, 4, byte_order
);
7012 displaced_write_reg (regs
, dsc
, regno
, memword
, LOAD_WRITE_PC
);
7014 xfer_addr
+= bump_after
;
7016 regmask
&= ~(1 << regno
);
7019 if (dsc
->u
.block
.writeback
)
7020 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, xfer_addr
,
7024 /* Clean up an STM which included the PC in the register list. */
7027 cleanup_block_store_pc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7028 struct displaced_step_closure
*dsc
)
7030 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
7031 int store_executed
= condition_true (dsc
->u
.block
.cond
, status
);
7032 CORE_ADDR pc_stored_at
, transferred_regs
= bitcount (dsc
->u
.block
.regmask
);
7033 CORE_ADDR stm_insn_addr
;
7036 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
7038 /* If condition code fails, there's nothing else to do. */
7039 if (!store_executed
)
7042 if (dsc
->u
.block
.increment
)
7044 pc_stored_at
= dsc
->u
.block
.xfer_addr
+ 4 * transferred_regs
;
7046 if (dsc
->u
.block
.before
)
7051 pc_stored_at
= dsc
->u
.block
.xfer_addr
;
7053 if (dsc
->u
.block
.before
)
7057 pc_val
= read_memory_unsigned_integer (pc_stored_at
, 4, byte_order
);
7058 stm_insn_addr
= dsc
->scratch_base
;
7059 offset
= pc_val
- stm_insn_addr
;
7061 if (debug_displaced
)
7062 fprintf_unfiltered (gdb_stdlog
, "displaced: detected PC offset %.8lx for "
7063 "STM instruction\n", offset
);
7065 /* Rewrite the stored PC to the proper value for the non-displaced original
7067 write_memory_unsigned_integer (pc_stored_at
, 4, byte_order
,
7068 dsc
->insn_addr
+ offset
);
7071 /* Clean up an LDM which includes the PC in the register list. We clumped all
7072 the registers in the transferred list into a contiguous range r0...rX (to
7073 avoid loading PC directly and losing control of the debugged program), so we
7074 must undo that here. */
7077 cleanup_block_load_pc (struct gdbarch
*gdbarch
,
7078 struct regcache
*regs
,
7079 struct displaced_step_closure
*dsc
)
7081 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
7082 int load_executed
= condition_true (dsc
->u
.block
.cond
, status
);
7083 unsigned int mask
= dsc
->u
.block
.regmask
, write_reg
= ARM_PC_REGNUM
;
7084 unsigned int regs_loaded
= bitcount (mask
);
7085 unsigned int num_to_shuffle
= regs_loaded
, clobbered
;
7087 /* The method employed here will fail if the register list is fully populated
7088 (we need to avoid loading PC directly). */
7089 gdb_assert (num_to_shuffle
< 16);
7094 clobbered
= (1 << num_to_shuffle
) - 1;
7096 while (num_to_shuffle
> 0)
7098 if ((mask
& (1 << write_reg
)) != 0)
7100 unsigned int read_reg
= num_to_shuffle
- 1;
7102 if (read_reg
!= write_reg
)
7104 ULONGEST rval
= displaced_read_reg (regs
, dsc
, read_reg
);
7105 displaced_write_reg (regs
, dsc
, write_reg
, rval
, LOAD_WRITE_PC
);
7106 if (debug_displaced
)
7107 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: move "
7108 "loaded register r%d to r%d\n"), read_reg
,
7111 else if (debug_displaced
)
7112 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: register "
7113 "r%d already in the right place\n"),
7116 clobbered
&= ~(1 << write_reg
);
7124 /* Restore any registers we scribbled over. */
7125 for (write_reg
= 0; clobbered
!= 0; write_reg
++)
7127 if ((clobbered
& (1 << write_reg
)) != 0)
7129 displaced_write_reg (regs
, dsc
, write_reg
, dsc
->tmp
[write_reg
],
7131 if (debug_displaced
)
7132 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: restored "
7133 "clobbered register r%d\n"), write_reg
);
7134 clobbered
&= ~(1 << write_reg
);
7138 /* Perform register writeback manually. */
7139 if (dsc
->u
.block
.writeback
)
7141 ULONGEST new_rn_val
= dsc
->u
.block
.xfer_addr
;
7143 if (dsc
->u
.block
.increment
)
7144 new_rn_val
+= regs_loaded
* 4;
7146 new_rn_val
-= regs_loaded
* 4;
7148 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, new_rn_val
,
7153 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7154 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7157 arm_copy_block_xfer (struct gdbarch
*gdbarch
, uint32_t insn
,
7158 struct regcache
*regs
,
7159 struct displaced_step_closure
*dsc
)
7161 int load
= bit (insn
, 20);
7162 int user
= bit (insn
, 22);
7163 int increment
= bit (insn
, 23);
7164 int before
= bit (insn
, 24);
7165 int writeback
= bit (insn
, 21);
7166 int rn
= bits (insn
, 16, 19);
7168 /* Block transfers which don't mention PC can be run directly
7170 if (rn
!= ARM_PC_REGNUM
&& (insn
& 0x8000) == 0)
7171 return arm_copy_unmodified (gdbarch
, insn
, "ldm/stm", dsc
);
7173 if (rn
== ARM_PC_REGNUM
)
7175 warning (_("displaced: Unpredictable LDM or STM with "
7176 "base register r15"));
7177 return arm_copy_unmodified (gdbarch
, insn
, "unpredictable ldm/stm", dsc
);
7180 if (debug_displaced
)
7181 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
7182 "%.8lx\n", (unsigned long) insn
);
7184 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
7185 dsc
->u
.block
.rn
= rn
;
7187 dsc
->u
.block
.load
= load
;
7188 dsc
->u
.block
.user
= user
;
7189 dsc
->u
.block
.increment
= increment
;
7190 dsc
->u
.block
.before
= before
;
7191 dsc
->u
.block
.writeback
= writeback
;
7192 dsc
->u
.block
.cond
= bits (insn
, 28, 31);
7194 dsc
->u
.block
.regmask
= insn
& 0xffff;
7198 if ((insn
& 0xffff) == 0xffff)
7200 /* LDM with a fully-populated register list. This case is
7201 particularly tricky. Implement for now by fully emulating the
7202 instruction (which might not behave perfectly in all cases, but
7203 these instructions should be rare enough for that not to matter
7205 dsc
->modinsn
[0] = ARM_NOP
;
7207 dsc
->cleanup
= &cleanup_block_load_all
;
7211 /* LDM of a list of registers which includes PC. Implement by
7212 rewriting the list of registers to be transferred into a
7213 contiguous chunk r0...rX before doing the transfer, then shuffling
7214 registers into the correct places in the cleanup routine. */
7215 unsigned int regmask
= insn
& 0xffff;
7216 unsigned int num_in_list
= bitcount (regmask
), new_regmask
, bit
= 1;
7217 unsigned int to
= 0, from
= 0, i
, new_rn
;
7219 for (i
= 0; i
< num_in_list
; i
++)
7220 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
7222 /* Writeback makes things complicated. We need to avoid clobbering
7223 the base register with one of the registers in our modified
7224 register list, but just using a different register can't work in
7227 ldm r14!, {r0-r13,pc}
7229 which would need to be rewritten as:
7233 but that can't work, because there's no free register for N.
7235 Solve this by turning off the writeback bit, and emulating
7236 writeback manually in the cleanup routine. */
7241 new_regmask
= (1 << num_in_list
) - 1;
7243 if (debug_displaced
)
7244 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
7245 "{..., pc}: original reg list %.4x, modified "
7246 "list %.4x\n"), rn
, writeback
? "!" : "",
7247 (int) insn
& 0xffff, new_regmask
);
7249 dsc
->modinsn
[0] = (insn
& ~0xffff) | (new_regmask
& 0xffff);
7251 dsc
->cleanup
= &cleanup_block_load_pc
;
7256 /* STM of a list of registers which includes PC. Run the instruction
7257 as-is, but out of line: this will store the wrong value for the PC,
7258 so we must manually fix up the memory in the cleanup routine.
7259 Doing things this way has the advantage that we can auto-detect
7260 the offset of the PC write (which is architecture-dependent) in
7261 the cleanup routine. */
7262 dsc
->modinsn
[0] = insn
;
7264 dsc
->cleanup
= &cleanup_block_store_pc
;
7271 thumb2_copy_block_xfer (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
7272 struct regcache
*regs
,
7273 struct displaced_step_closure
*dsc
)
7275 int rn
= bits (insn1
, 0, 3);
7276 int load
= bit (insn1
, 4);
7277 int writeback
= bit (insn1
, 5);
7279 /* Block transfers which don't mention PC can be run directly
7281 if (rn
!= ARM_PC_REGNUM
&& (insn2
& 0x8000) == 0)
7282 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ldm/stm", dsc
);
7284 if (rn
== ARM_PC_REGNUM
)
7286 warning (_("displaced: Unpredictable LDM or STM with "
7287 "base register r15"));
7288 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7289 "unpredictable ldm/stm", dsc
);
7292 if (debug_displaced
)
7293 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
7294 "%.4x%.4x\n", insn1
, insn2
);
7296 /* Clear bit 13, since it should be always zero. */
7297 dsc
->u
.block
.regmask
= (insn2
& 0xdfff);
7298 dsc
->u
.block
.rn
= rn
;
7300 dsc
->u
.block
.load
= load
;
7301 dsc
->u
.block
.user
= 0;
7302 dsc
->u
.block
.increment
= bit (insn1
, 7);
7303 dsc
->u
.block
.before
= bit (insn1
, 8);
7304 dsc
->u
.block
.writeback
= writeback
;
7305 dsc
->u
.block
.cond
= INST_AL
;
7306 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
7310 if (dsc
->u
.block
.regmask
== 0xffff)
7312 /* This branch is impossible to happen. */
7317 unsigned int regmask
= dsc
->u
.block
.regmask
;
7318 unsigned int num_in_list
= bitcount (regmask
), new_regmask
, bit
= 1;
7319 unsigned int to
= 0, from
= 0, i
, new_rn
;
7321 for (i
= 0; i
< num_in_list
; i
++)
7322 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
7327 new_regmask
= (1 << num_in_list
) - 1;
7329 if (debug_displaced
)
7330 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
7331 "{..., pc}: original reg list %.4x, modified "
7332 "list %.4x\n"), rn
, writeback
? "!" : "",
7333 (int) dsc
->u
.block
.regmask
, new_regmask
);
7335 dsc
->modinsn
[0] = insn1
;
7336 dsc
->modinsn
[1] = (new_regmask
& 0xffff);
7339 dsc
->cleanup
= &cleanup_block_load_pc
;
7344 dsc
->modinsn
[0] = insn1
;
7345 dsc
->modinsn
[1] = insn2
;
7347 dsc
->cleanup
= &cleanup_block_store_pc
;
/* Cleanup/copy SVC (SWI) instructions.  These two functions are overridden
   for Linux, where some SVC instructions must be treated specially.  */

/* Cleanup after a displaced SVC: resume execution at the instruction
   following the original SVC.  */

static void
cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
             struct displaced_step_closure *dsc)
{
  /* The address of the instruction after the original SVC.  */
  CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
                        "%.8lx\n", (unsigned long) resume_addr);

  displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
}
/* Common copy routine for svc instruciton.  Assumes dsc->modinsn[] has
   already been filled in by the caller.  Returns the result of the
   OS-specific hook, when one is installed, else 0.  */

static int
install_svc (struct gdbarch *gdbarch, struct regcache *regs,
             struct displaced_step_closure *dsc)
{
  /* Preparation: none.
     Insn: unmodified svc.
     Cleanup: pc <- insn_addr + insn_size.  */

  /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
     instruction.  */
  dsc->wrote_to_pc = 1;

  /* Allow OS-specific code to override SVC handling.  */
  if (dsc->u.svc.copy_svc_os)
    return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
  else
    {
      dsc->cleanup = &cleanup_svc;
      return 0;
    }
}
/* Copy an ARM-mode SVC instruction for displaced stepping: the SVC is run
   unmodified out of line; install_svc arranges the cleanup.  */

static int
arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
              struct regcache *regs, struct displaced_step_closure *dsc)
{

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
                        (unsigned long) insn);

  dsc->modinsn[0] = insn;

  return install_svc (gdbarch, regs, dsc);
}
/* Copy a Thumb-mode SVC instruction for displaced stepping; the Thumb
   counterpart of arm_copy_svc.  */

static int
thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
                struct regcache *regs, struct displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
                        insn);

  dsc->modinsn[0] = insn;

  return install_svc (gdbarch, regs, dsc);
}
/* Copy undefined instructions.  The instruction is executed unmodified out
   of line so that the resulting exception is raised as it would have been
   in place.  */

static int
arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
                struct displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
                        "displaced: copying undefined insn %.8lx\n",
                        (unsigned long) insn);

  dsc->modinsn[0] = insn;

  return 0;
}
/* Copy an undefined 32-bit Thumb-2 instruction: run both halfwords
   unmodified out of line (same rationale as arm_copy_undef).  */

static int
thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
                        struct displaced_step_closure *dsc)
{

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
                        "%.4x %.4x\n", (unsigned short) insn1,
                        (unsigned short) insn2);

  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = insn2;
  dsc->numinsns = 2;

  return 0;
}
/* Copy unpredictable instructions.  Executed unmodified out of line;
   behavior is by definition unpredictable either way.  */

static int
arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
                 struct displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
                        "%.8lx\n", (unsigned long) insn);

  dsc->modinsn[0] = insn;

  return 0;
}
/* The decode_* functions are instruction decoding helpers.  They mostly follow
   the presentation in the ARM ARM.  */

/* Decode miscellaneous, memory-hint and advanced-SIMD instructions in the
   unconditional space, dispatching each to the appropriate copy routine.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
                              struct regcache *regs,
                              struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
                                dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      if (rn != 0xf)
        return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
        return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    switch (op1 & ~0x80)
      {
      case 0x61:
        return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
        return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
        /* pld/pldw reg.  */
        return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
        return arm_copy_unpred (gdbarch, insn, dsc);
      default:
        return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
/* Decode an instruction in the ARM "unconditional" (condition field 0xF)
   encoding space and dispatch it to the appropriate copy routine.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
                          struct regcache *regs,
                          struct displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      switch ((insn & 0xe00000) >> 21)
        {
        case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
          /* stc/stc2.  */
          return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

        case 0x2:
          return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

        default:
          return arm_copy_undef (gdbarch, insn, dsc);
        }

    case 0x9:
      {
        int rn_f = (bits (insn, 16, 19) == 0xf);
        switch ((insn & 0xe00000) >> 21)
          {
          case 0x1: case 0x3:
            /* ldc/ldc2 imm (undefined for rn == pc).  */
            return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
                        : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

          case 0x2:
            return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

          case 0x4: case 0x5: case 0x6: case 0x7:
            /* ldc/ldc2 lit (undefined for rn != pc).  */
            return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
                        : arm_copy_undef (gdbarch, insn, dsc);

          default:
            return arm_copy_undef (gdbarch, insn, dsc);
          }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
        /* ldc/ldc2 lit.  */
        return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
        return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
        return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
        return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
        return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
        return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode miscellaneous instructions in dp/misc encoding space.  */

static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
                          struct regcache *regs,
                          struct displaced_step_closure *dsc)
{
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);
  unsigned int op1 = bits (insn, 16, 19);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
        return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
        return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
        return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
        /* Not really supported.  */
        return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
        return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
        return arm_copy_bx_blx_reg (gdbarch, insn,
                                    regs, dsc);  /* blx register.  */
      else
        return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
        return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
        /* Not really supported.  */
        return arm_copy_unmodified (gdbarch, insn, "smc", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode the ARM data-processing / miscellaneous encoding space and
   dispatch to the appropriate copy routine.  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
                    struct regcache *regs,
                    struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
        return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
        return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
        return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
        return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
        return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
        return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
        return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
        return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
        return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
        return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
        /* 2nd arg means "unpriveleged".  */
        return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
                                     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode load/store of a word or unsigned byte (immediate and register
   forms).  The trailing arguments to arm_copy_ldr_str_ldrb_strb are
   (load, size, usermode).  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
                             struct regcache *regs,
                             struct displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);
  /* NOTE(review): rn_f appears unused in this function — candidate for
     removal; confirm against upstream before deleting.  */
  int rn_f = bits (insn, 16, 19) == 0xf;

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
           || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
           || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
           || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
           || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
           || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
           || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
           || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
/* Decode ARM "media" instructions (parallel add/sub, pack/unpack,
   saturate/reverse, usad8/usada8, bit-field ops).  All are either copied
   unmodified or treated as undefined.  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
                  struct displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
                                  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
        {
          if (bits (insn, 12, 15) == 0xf)
            return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
          else
            return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
        }
      else
        return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
        return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
        return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
        {
          if (bits (insn, 0, 3) == 0xf)
            return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
          else
            return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
        }
      else
        return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
        return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
        return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode branch (B/BL/BLX) versus block-transfer (LDM/STM) instructions:
   bit 25 distinguishes the two groups.  */

static int
arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
                        struct regcache *regs,
                        struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
  else
    return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
}
/* Decode VFP/Neon extension-register load/store instructions.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
                          struct regcache *regs,
                          struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
         zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode shifted register instructions.  */

static int
thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
                            uint16_t insn2, struct regcache *regs,
                            struct displaced_step_closure *dsc)
{
  /* PC is only allowed to be used in instruction MOV.  */

  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn = bits (insn1, 0, 3);

  if (op == 0x2 && rn == 0xf) /* MOV */
    return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                        "dp (shift reg)", dsc);
}
/* Decode extension register load/store.  Exactly the same as
   arm_decode_ext_reg_ld_st.  */

static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
                             uint16_t insn2, struct regcache *regs,
                             struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                          "vfp/neon vmov", dsc);

    case 0x08: case 0x0c: /* 01x00 */
    case 0x0a: case 0x0e: /* 01x10 */
    case 0x12: case 0x16: /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                          "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d: /* 01x01 */
    case 0x0b: case 0x0f: /* 01x11 */
    case 0x13: case 0x17: /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                          "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                          "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode SVC and coprocessor instructions (including VFP/Neon transfers)
   and dispatch to the appropriate copy routine.  TO is unused here but kept
   for interface compatibility with the decode-table callers.  */

static int
arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
                      struct regcache *regs, struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 25);
  int op = bit (insn, 4);
  unsigned int coproc = bits (insn, 8, 11);
  /* NOTE(review): rn appears unused below — candidate for removal; confirm
     against upstream before deleting.  */
  unsigned int rn = bits (insn, 16, 19);

  if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
    return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
           && (coproc & 0xe) != 0xa)
    /* stc/stc2.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
           && (coproc & 0xe) != 0xa)
    /* ldc/ldc2 imm/lit.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x3e) == 0x00)
    return arm_copy_undef (gdbarch, insn, dsc);
  else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
    return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
  else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
  else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
  else if ((op1 & 0x30) == 0x20 && !op)
    {
      if ((coproc & 0xe) == 0xa)
        return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
      else
        return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
    }
  else if ((op1 & 0x30) == 0x20 && op)
    return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
  else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
  else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
  else if ((op1 & 0x30) == 0x30)
    return arm_copy_svc (gdbarch, insn, regs, dsc);
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Possibly unreachable.  */
}
/* Decode Thumb-2 coprocessor / SIMD / VFP instructions and dispatch to the
   appropriate copy routine.  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
                         uint16_t insn2, struct regcache *regs,
                         struct displaced_step_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  /* NOTE(review): op1 and rn appear unused below — candidates for removal;
     confirm against upstream before deleting.  */
  unsigned int op1 = bits (insn1, 4, 9);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);
  unsigned int rn = bits (insn1, 0, 3);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
        return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                            "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
                                            dsc);
      else if (bit_5_8 == 0) /* UNDEFINED.  */
        return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
        {
          /*coproc is 101x.  SIMD/VFP, ext registers load/store.  */
          if ((coproc & 0xe) == 0xa)
            return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
                                                dsc);
          else /* coproc is not 101x.  */
            {
              if (bit_4 == 0) /* STC/STC2.  */
                return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                    "stc/stc2", dsc);
              else /* LDC/LDC2 {literal, immediate}.  */
                return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
                                                     regs, dsc);
            }
        }
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);

  return 0;
}
/* Common preparation for PC-relative (ADR-style) instructions: copy the
   PC value into RD so the rewritten out-of-line instruction can compute
   relative to RD instead of PC.

     ADR Rd, #imm

   Rewrite as:

     Preparation: Rd <- PC
     Insn: ADD Rd, #imm
     Cleanup: Null.  */

static void
install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
                     struct displaced_step_closure *dsc, int rd)
{
  /* Rd <- PC; the PC read accounts for the architectural pipeline offset.  */
  int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
  displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
}
/* Copy a 16-bit Thumb PC-relative add (ADR) by rewriting it as an
   immediate ADDS on RD, after install_pc_relative has loaded PC into RD.  */

static int
thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
                              struct displaced_step_closure *dsc,
                              int rd, unsigned int imm)
{

  /* Encoding T2: ADDS Rd, #imm */
  dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
/* Decode a 16-bit Thumb ADR instruction (extract Rd and the 8-bit
   immediate) and hand off to thumb_copy_pc_relative_16bit.  */

static int
thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
                                struct regcache *regs,
                                struct displaced_step_closure *dsc)
{
  unsigned int rd = bits (insn, 8, 10);
  unsigned int imm8 = bits (insn, 0, 7);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
                        "displaced: copying thumb adr r%d, #%d insn %.4x\n",
                        rd, imm8, insn);

  return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
}
/* Copy a 32-bit Thumb-2 PC-relative ADR (ADD/SUB with PC base) by
   rewriting it as a plain ADD/SUB on RD after install_pc_relative has
   loaded PC into RD.  */

static int
thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
                              uint16_t insn2, struct regcache *regs,
                              struct displaced_step_closure *dsc)
{
  unsigned int rd = bits (insn2, 8, 11);
  /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
     extract raw immediate encoding rather than computing immediate.  When
     generating ADD or SUB instruction, we can simply perform OR operation to
     set immediate into ADD.  */
  unsigned int imm_3_8 = insn2 & 0x70ff;
  unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
                        "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
                        rd, imm_i, imm_3_8, insn1, insn2);

  if (bit (insn1, 7)) /* Bit 7 set: the "SUB" (minus-immediate) form.  */
    {
      /* SUB Rd, Rd, #imm */
      dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  else /* Bit 7 clear: the "ADD" (plus-immediate) form.  */
    {
      /* ADD Rd, Rd, #imm */
      dsc->modinsn[0] = (0xf100 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  dsc->numinsns = 2;

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
8084 thumb_copy_16bit_ldr_literal (struct gdbarch
*gdbarch
, unsigned short insn1
,
8085 struct regcache
*regs
,
8086 struct displaced_step_closure
*dsc
)
8088 unsigned int rt
= bits (insn1
, 8, 10);
8090 int imm8
= (bits (insn1
, 0, 7) << 2);
8091 CORE_ADDR from
= dsc
->insn_addr
;
8097 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8099 Insn: LDR R0, [R2, R3];
8100 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8102 if (debug_displaced
)
8103 fprintf_unfiltered (gdb_stdlog
,
8104 "displaced: copying thumb ldr r%d [pc #%d]\n"
8107 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
8108 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
8109 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
8110 pc
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
8111 /* The assembler calculates the required value of the offset from the
8112 Align(PC,4) value of this instruction to the label. */
8113 pc
= pc
& 0xfffffffc;
8115 displaced_write_reg (regs
, dsc
, 2, pc
, CANNOT_WRITE_PC
);
8116 displaced_write_reg (regs
, dsc
, 3, imm8
, CANNOT_WRITE_PC
);
8119 dsc
->u
.ldst
.xfersize
= 4;
8121 dsc
->u
.ldst
.immed
= 0;
8122 dsc
->u
.ldst
.writeback
= 0;
8123 dsc
->u
.ldst
.restore_r4
= 0;
8125 dsc
->modinsn
[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8127 dsc
->cleanup
= &cleanup_load
;
/* Copy Thumb cbnz/cbz insruction.  The branch decision is made here (the
   register is compared against zero at copy time), and the instruction is
   replaced by a NOP; cleanup_branch performs the actual PC update.  */

static int
thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
                     struct regcache *regs,
                     struct displaced_step_closure *dsc)
{
  int non_zero = bit (insn1, 11);
  unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
  CORE_ADDR from = dsc->insn_addr;
  int rn = bits (insn1, 0, 2);
  int rn_val = displaced_read_reg (regs, dsc, rn);

  dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
  /* CBNZ and CBZ do not affect the condition flags.  If condition is true,
     set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
     condition is false, let it be, cleanup_branch will do nothing.  */
  if (dsc->u.branch.cond)
    {
      dsc->u.branch.cond = INST_AL;
      /* Taken: branch target is PC (insn + 4) plus the decoded offset.  */
      dsc->u.branch.dest = from + 4 + imm5;
    }
  else
    /* Not taken: fall through to the next (2-byte) instruction.  */
    dsc->u.branch.dest = from + 2;

  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
                        " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
                        rn, rn_val, insn1, dsc->u.branch.dest);

  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;
  return 0;
}
/* Copy Table Branch Byte/Halfword (TBB/TBH).  The table entry is read here
   at copy time and the branch destination computed from it; cleanup_branch
   performs the PC update.  */

static int
thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
                          uint16_t insn2, struct regcache *regs,
                          struct displaced_step_closure *dsc)
{
  ULONGEST rn_val, rm_val;
  int is_tbh = bit (insn2, 4);
  CORE_ADDR halfwords = 0;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* Rn is the table base, Rm the index.  */
  rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
  rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));

  if (is_tbh)
    {
      /* TBH: halfword table entries, scaled index.  */
      gdb_byte buf[2];

      target_read_memory (rn_val + 2 * rm_val, buf, 2);
      halfwords = extract_unsigned_integer (buf, 2, byte_order);
    }
  else
    {
      /* TBB: byte table entries.  */
      gdb_byte buf[1];

      target_read_memory (rn_val + rm_val, buf, 1);
      halfwords = extract_unsigned_integer (buf, 1, byte_order);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
                        " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
                        (unsigned int) rn_val, (unsigned int) rm_val,
                        (unsigned int) halfwords);

  dsc->u.branch.cond = INST_AL;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  /* Destination = PC (insn + 4) plus twice the table entry.  */
  dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
/* Cleanup for a 16-bit Thumb POP with a full register list (see
   thumb_copy_pop_pc_16bit case 1): PC <- r7, r7 <- r8, r8 <- tmp[0].  */

static void
cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
                          struct displaced_step_closure *dsc)
{
  /* PC <- r7 (the popped PC value was parked in r7).  */
  int val = displaced_read_reg (regs, dsc, 7);
  displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);

  /* r7 <- r8 (r8 held r7's popped value).  */
  val = displaced_read_reg (regs, dsc, 8);
  displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);

  /* r8 <- tmp[0] (restore the saved original r8).  */
  displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
}
8234 thumb_copy_pop_pc_16bit (struct gdbarch
*gdbarch
, unsigned short insn1
,
8235 struct regcache
*regs
,
8236 struct displaced_step_closure
*dsc
)
8238 dsc
->u
.block
.regmask
= insn1
& 0x00ff;
8240 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8243 (1) register list is full, that is, r0-r7 are used.
8244 Prepare: tmp[0] <- r8
8246 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8247 MOV r8, r7; Move value of r7 to r8;
8248 POP {r7}; Store PC value into r7.
8250 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8252 (2) register list is not full, supposing there are N registers in
8253 register list (except PC, 0 <= N <= 7).
8254 Prepare: for each i, 0 - N, tmp[i] <- ri.
8256 POP {r0, r1, ...., rN};
8258 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8259 from tmp[] properly.
8261 if (debug_displaced
)
8262 fprintf_unfiltered (gdb_stdlog
,
8263 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8264 dsc
->u
.block
.regmask
, insn1
);
8266 if (dsc
->u
.block
.regmask
== 0xff)
8268 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 8);
8270 dsc
->modinsn
[0] = (insn1
& 0xfeff); /* POP {r0,r1,...,r6, r7} */
8271 dsc
->modinsn
[1] = 0x46b8; /* MOV r8, r7 */
8272 dsc
->modinsn
[2] = 0xbc80; /* POP {r7} */
8275 dsc
->cleanup
= &cleanup_pop_pc_16bit_all
;
8279 unsigned int num_in_list
= bitcount (dsc
->u
.block
.regmask
);
8280 unsigned int new_regmask
, bit
= 1;
8281 unsigned int to
= 0, from
= 0, i
, new_rn
;
8283 for (i
= 0; i
< num_in_list
+ 1; i
++)
8284 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
8286 new_regmask
= (1 << (num_in_list
+ 1)) - 1;
8288 if (debug_displaced
)
8289 fprintf_unfiltered (gdb_stdlog
, _("displaced: POP "
8290 "{..., pc}: original reg list %.4x,"
8291 " modified list %.4x\n"),
8292 (int) dsc
->u
.block
.regmask
, new_regmask
);
8294 dsc
->u
.block
.regmask
|= 0x8000;
8295 dsc
->u
.block
.writeback
= 0;
8296 dsc
->u
.block
.cond
= INST_AL
;
8298 dsc
->modinsn
[0] = (insn1
& ~0x1ff) | (new_regmask
& 0xff);
8300 dsc
->cleanup
= &cleanup_block_load_pc
;
8307 thumb_process_displaced_16bit_insn (struct gdbarch
*gdbarch
, uint16_t insn1
,
8308 struct regcache
*regs
,
8309 struct displaced_step_closure
*dsc
)
8311 unsigned short op_bit_12_15
= bits (insn1
, 12, 15);
8312 unsigned short op_bit_10_11
= bits (insn1
, 10, 11);
8315 /* 16-bit thumb instructions. */
8316 switch (op_bit_12_15
)
8318 /* Shift (imme), add, subtract, move and compare. */
8319 case 0: case 1: case 2: case 3:
8320 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
,
8321 "shift/add/sub/mov/cmp",
8325 switch (op_bit_10_11
)
8327 case 0: /* Data-processing */
8328 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
,
8332 case 1: /* Special data instructions and branch and exchange. */
8334 unsigned short op
= bits (insn1
, 7, 9);
8335 if (op
== 6 || op
== 7) /* BX or BLX */
8336 err
= thumb_copy_bx_blx_reg (gdbarch
, insn1
, regs
, dsc
);
8337 else if (bits (insn1
, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8338 err
= thumb_copy_alu_reg (gdbarch
, insn1
, regs
, dsc
);
8340 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "special data",
8344 default: /* LDR (literal) */
8345 err
= thumb_copy_16bit_ldr_literal (gdbarch
, insn1
, regs
, dsc
);
8348 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8349 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "ldr/str", dsc
);
8352 if (op_bit_10_11
< 2) /* Generate PC-relative address */
8353 err
= thumb_decode_pc_relative_16bit (gdbarch
, insn1
, regs
, dsc
);
8354 else /* Generate SP-relative address */
8355 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "sp-relative", dsc
);
8357 case 11: /* Misc 16-bit instructions */
8359 switch (bits (insn1
, 8, 11))
8361 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8362 err
= thumb_copy_cbnz_cbz (gdbarch
, insn1
, regs
, dsc
);
8364 case 12: case 13: /* POP */
8365 if (bit (insn1
, 8)) /* PC is in register list. */
8366 err
= thumb_copy_pop_pc_16bit (gdbarch
, insn1
, regs
, dsc
);
8368 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "pop", dsc
);
8370 case 15: /* If-Then, and hints */
8371 if (bits (insn1
, 0, 3))
8372 /* If-Then makes up to four following instructions conditional.
8373 IT instruction itself is not conditional, so handle it as a
8374 common unmodified instruction. */
8375 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "If-Then",
8378 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "hints", dsc
);
8381 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "misc", dsc
);
8386 if (op_bit_10_11
< 2) /* Store multiple registers */
8387 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "stm", dsc
);
8388 else /* Load multiple registers */
8389 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "ldm", dsc
);
8391 case 13: /* Conditional branch and supervisor call */
8392 if (bits (insn1
, 9, 11) != 7) /* conditional branch */
8393 err
= thumb_copy_b (gdbarch
, insn1
, dsc
);
8395 err
= thumb_copy_svc (gdbarch
, insn1
, regs
, dsc
);
8397 case 14: /* Unconditional branch */
8398 err
= thumb_copy_b (gdbarch
, insn1
, dsc
);
8405 internal_error (__FILE__
, __LINE__
,
8406 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8410 decode_thumb_32bit_ld_mem_hints (struct gdbarch
*gdbarch
,
8411 uint16_t insn1
, uint16_t insn2
,
8412 struct regcache
*regs
,
8413 struct displaced_step_closure
*dsc
)
8415 int rt
= bits (insn2
, 12, 15);
8416 int rn
= bits (insn1
, 0, 3);
8417 int op1
= bits (insn1
, 7, 8);
8420 switch (bits (insn1
, 5, 6))
8422 case 0: /* Load byte and memory hints */
8423 if (rt
== 0xf) /* PLD/PLI */
8426 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8427 return thumb2_copy_preload (gdbarch
, insn1
, insn2
, regs
, dsc
);
8429 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8434 if (rn
== 0xf) /* LDRB/LDRSB (literal) */
8435 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
,
8438 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8439 "ldrb{reg, immediate}/ldrbt",
8444 case 1: /* Load halfword and memory hints. */
8445 if (rt
== 0xf) /* PLD{W} and Unalloc memory hint. */
8446 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8447 "pld/unalloc memhint", dsc
);
8451 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
,
8454 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8458 case 2: /* Load word */
8460 int insn2_bit_8_11
= bits (insn2
, 8, 11);
8463 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
, 4);
8464 else if (op1
== 0x1) /* Encoding T3 */
8465 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
, dsc
,
8467 else /* op1 == 0x0 */
8469 if (insn2_bit_8_11
== 0xc || (insn2_bit_8_11
& 0x9) == 0x9)
8470 /* LDR (immediate) */
8471 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
,
8472 dsc
, bit (insn2
, 8), 1);
8473 else if (insn2_bit_8_11
== 0xe) /* LDRT */
8474 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8477 /* LDR (register) */
8478 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
,
8484 return thumb_32bit_copy_undef (gdbarch
, insn1
, insn2
, dsc
);
8491 thumb_process_displaced_32bit_insn (struct gdbarch
*gdbarch
, uint16_t insn1
,
8492 uint16_t insn2
, struct regcache
*regs
,
8493 struct displaced_step_closure
*dsc
)
8496 unsigned short op
= bit (insn2
, 15);
8497 unsigned int op1
= bits (insn1
, 11, 12);
8503 switch (bits (insn1
, 9, 10))
8508 /* Load/store {dual, execlusive}, table branch. */
8509 if (bits (insn1
, 7, 8) == 1 && bits (insn1
, 4, 5) == 1
8510 && bits (insn2
, 5, 7) == 0)
8511 err
= thumb2_copy_table_branch (gdbarch
, insn1
, insn2
, regs
,
8514 /* PC is not allowed to use in load/store {dual, exclusive}
8516 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8517 "load/store dual/ex", dsc
);
8519 else /* load/store multiple */
8521 switch (bits (insn1
, 7, 8))
8523 case 0: case 3: /* SRS, RFE */
8524 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8527 case 1: case 2: /* LDM/STM/PUSH/POP */
8528 err
= thumb2_copy_block_xfer (gdbarch
, insn1
, insn2
, regs
, dsc
);
8535 /* Data-processing (shift register). */
8536 err
= thumb2_decode_dp_shift_reg (gdbarch
, insn1
, insn2
, regs
,
8539 default: /* Coprocessor instructions. */
8540 err
= thumb2_decode_svc_copro (gdbarch
, insn1
, insn2
, regs
, dsc
);
8545 case 2: /* op1 = 2 */
8546 if (op
) /* Branch and misc control. */
8548 if (bit (insn2
, 14) /* BLX/BL */
8549 || bit (insn2
, 12) /* Unconditional branch */
8550 || (bits (insn1
, 7, 9) != 0x7)) /* Conditional branch */
8551 err
= thumb2_copy_b_bl_blx (gdbarch
, insn1
, insn2
, regs
, dsc
);
8553 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8558 if (bit (insn1
, 9)) /* Data processing (plain binary imm). */
8560 int op
= bits (insn1
, 4, 8);
8561 int rn
= bits (insn1
, 0, 3);
8562 if ((op
== 0 || op
== 0xa) && rn
== 0xf)
8563 err
= thumb_copy_pc_relative_32bit (gdbarch
, insn1
, insn2
,
8566 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8569 else /* Data processing (modified immeidate) */
8570 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8574 case 3: /* op1 = 3 */
8575 switch (bits (insn1
, 9, 10))
8579 err
= decode_thumb_32bit_ld_mem_hints (gdbarch
, insn1
, insn2
,
8581 else /* NEON Load/Store and Store single data item */
8582 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8583 "neon elt/struct load/store",
8586 case 1: /* op1 = 3, bits (9, 10) == 1 */
8587 switch (bits (insn1
, 7, 8))
8589 case 0: case 1: /* Data processing (register) */
8590 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8593 case 2: /* Multiply and absolute difference */
8594 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8595 "mul/mua/diff", dsc
);
8597 case 3: /* Long multiply and divide */
8598 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
8603 default: /* Coprocessor instructions */
8604 err
= thumb2_decode_svc_copro (gdbarch
, insn1
, insn2
, regs
, dsc
);
8613 internal_error (__FILE__
, __LINE__
,
8614 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8619 thumb_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
8620 CORE_ADDR to
, struct regcache
*regs
,
8621 struct displaced_step_closure
*dsc
)
8623 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8625 = read_memory_unsigned_integer (from
, 2, byte_order_for_code
);
8627 if (debug_displaced
)
8628 fprintf_unfiltered (gdb_stdlog
, "displaced: process thumb insn %.4x "
8629 "at %.8lx\n", insn1
, (unsigned long) from
);
8632 dsc
->insn_size
= thumb_insn_size (insn1
);
8633 if (thumb_insn_size (insn1
) == 4)
8636 = read_memory_unsigned_integer (from
+ 2, 2, byte_order_for_code
);
8637 thumb_process_displaced_32bit_insn (gdbarch
, insn1
, insn2
, regs
, dsc
);
8640 thumb_process_displaced_16bit_insn (gdbarch
, insn1
, regs
, dsc
);
8644 arm_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
8645 CORE_ADDR to
, struct regcache
*regs
,
8646 struct displaced_step_closure
*dsc
)
8649 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8652 /* Most displaced instructions use a 1-instruction scratch space, so set this
8653 here and override below if/when necessary. */
8655 dsc
->insn_addr
= from
;
8656 dsc
->scratch_base
= to
;
8657 dsc
->cleanup
= NULL
;
8658 dsc
->wrote_to_pc
= 0;
8660 if (!displaced_in_arm_mode (regs
))
8661 return thumb_process_displaced_insn (gdbarch
, from
, to
, regs
, dsc
);
8665 insn
= read_memory_unsigned_integer (from
, 4, byte_order_for_code
);
8666 if (debug_displaced
)
8667 fprintf_unfiltered (gdb_stdlog
, "displaced: stepping insn %.8lx "
8668 "at %.8lx\n", (unsigned long) insn
,
8669 (unsigned long) from
);
8671 if ((insn
& 0xf0000000) == 0xf0000000)
8672 err
= arm_decode_unconditional (gdbarch
, insn
, regs
, dsc
);
8673 else switch (((insn
& 0x10) >> 4) | ((insn
& 0xe000000) >> 24))
8675 case 0x0: case 0x1: case 0x2: case 0x3:
8676 err
= arm_decode_dp_misc (gdbarch
, insn
, regs
, dsc
);
8679 case 0x4: case 0x5: case 0x6:
8680 err
= arm_decode_ld_st_word_ubyte (gdbarch
, insn
, regs
, dsc
);
8684 err
= arm_decode_media (gdbarch
, insn
, dsc
);
8687 case 0x8: case 0x9: case 0xa: case 0xb:
8688 err
= arm_decode_b_bl_ldmstm (gdbarch
, insn
, regs
, dsc
);
8691 case 0xc: case 0xd: case 0xe: case 0xf:
8692 err
= arm_decode_svc_copro (gdbarch
, insn
, to
, regs
, dsc
);
8697 internal_error (__FILE__
, __LINE__
,
8698 _("arm_process_displaced_insn: Instruction decode error"));
8701 /* Actually set up the scratch space for a displaced instruction. */
8704 arm_displaced_init_closure (struct gdbarch
*gdbarch
, CORE_ADDR from
,
8705 CORE_ADDR to
, struct displaced_step_closure
*dsc
)
8707 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8708 unsigned int i
, len
, offset
;
8709 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8710 int size
= dsc
->is_thumb
? 2 : 4;
8711 const gdb_byte
*bkp_insn
;
8714 /* Poke modified instruction(s). */
8715 for (i
= 0; i
< dsc
->numinsns
; i
++)
8717 if (debug_displaced
)
8719 fprintf_unfiltered (gdb_stdlog
, "displaced: writing insn ");
8721 fprintf_unfiltered (gdb_stdlog
, "%.8lx",
8724 fprintf_unfiltered (gdb_stdlog
, "%.4x",
8725 (unsigned short)dsc
->modinsn
[i
]);
8727 fprintf_unfiltered (gdb_stdlog
, " at %.8lx\n",
8728 (unsigned long) to
+ offset
);
8731 write_memory_unsigned_integer (to
+ offset
, size
,
8732 byte_order_for_code
,
8737 /* Choose the correct breakpoint instruction. */
8740 bkp_insn
= tdep
->thumb_breakpoint
;
8741 len
= tdep
->thumb_breakpoint_size
;
8745 bkp_insn
= tdep
->arm_breakpoint
;
8746 len
= tdep
->arm_breakpoint_size
;
8749 /* Put breakpoint afterwards. */
8750 write_memory (to
+ offset
, bkp_insn
, len
);
8752 if (debug_displaced
)
8753 fprintf_unfiltered (gdb_stdlog
, "displaced: copy %s->%s: ",
8754 paddress (gdbarch
, from
), paddress (gdbarch
, to
));
8757 /* Entry point for copying an instruction into scratch space for displaced
8760 struct displaced_step_closure
*
8761 arm_displaced_step_copy_insn (struct gdbarch
*gdbarch
,
8762 CORE_ADDR from
, CORE_ADDR to
,
8763 struct regcache
*regs
)
8765 struct displaced_step_closure
*dsc
8766 = xmalloc (sizeof (struct displaced_step_closure
));
8767 arm_process_displaced_insn (gdbarch
, from
, to
, regs
, dsc
);
8768 arm_displaced_init_closure (gdbarch
, from
, to
, dsc
);
8773 /* Entry point for cleaning things up after a displaced instruction has been
8777 arm_displaced_step_fixup (struct gdbarch
*gdbarch
,
8778 struct displaced_step_closure
*dsc
,
8779 CORE_ADDR from
, CORE_ADDR to
,
8780 struct regcache
*regs
)
8783 dsc
->cleanup (gdbarch
, regs
, dsc
);
8785 if (!dsc
->wrote_to_pc
)
8786 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
8787 dsc
->insn_addr
+ dsc
->insn_size
);
8791 #include "bfd-in2.h"
8792 #include "libcoff.h"
8795 gdb_print_insn_arm (bfd_vma memaddr
, disassemble_info
*info
)
8797 struct gdbarch
*gdbarch
= info
->application_data
;
8799 if (arm_pc_is_thumb (gdbarch
, memaddr
))
8801 static asymbol
*asym
;
8802 static combined_entry_type ce
;
8803 static struct coff_symbol_struct csym
;
8804 static struct bfd fake_bfd
;
8805 static bfd_target fake_target
;
8807 if (csym
.native
== NULL
)
8809 /* Create a fake symbol vector containing a Thumb symbol.
8810 This is solely so that the code in print_insn_little_arm()
8811 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8812 the presence of a Thumb symbol and switch to decoding
8813 Thumb instructions. */
8815 fake_target
.flavour
= bfd_target_coff_flavour
;
8816 fake_bfd
.xvec
= &fake_target
;
8817 ce
.u
.syment
.n_sclass
= C_THUMBEXTFUNC
;
8819 csym
.symbol
.the_bfd
= &fake_bfd
;
8820 csym
.symbol
.name
= "fake";
8821 asym
= (asymbol
*) & csym
;
8824 memaddr
= UNMAKE_THUMB_ADDR (memaddr
);
8825 info
->symbols
= &asym
;
8828 info
->symbols
= NULL
;
8830 if (info
->endian
== BFD_ENDIAN_BIG
)
8831 return print_insn_big_arm (memaddr
, info
);
8833 return print_insn_little_arm (memaddr
, info
);
8836 /* The following define instruction sequences that will cause ARM
8837 cpu's to take an undefined instruction trap. These are used to
8838 signal a breakpoint to GDB.
8840 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8841 modes. A different instruction is required for each mode. The ARM
8842 cpu's can also be big or little endian. Thus four different
8843 instructions are needed to support all cases.
8845 Note: ARMv4 defines several new instructions that will take the
8846 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8847 not in fact add the new instructions. The new undefined
8848 instructions in ARMv4 are all instructions that had no defined
8849 behaviour in earlier chips. There is no guarantee that they will
8850 raise an exception, but may be treated as NOP's. In practice, it
8851 may only safe to rely on instructions matching:
8853 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8854 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8855 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8857 Even this may only true if the condition predicate is true. The
8858 following use a condition predicate of ALWAYS so it is always TRUE.
8860 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8861 and NetBSD all use a software interrupt rather than an undefined
8862 instruction to force a trap. This can be handled by by the
8863 abi-specific code during establishment of the gdbarch vector. */
8865 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8866 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8867 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8868 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8870 static const gdb_byte arm_default_arm_le_breakpoint
[] = ARM_LE_BREAKPOINT
;
8871 static const gdb_byte arm_default_arm_be_breakpoint
[] = ARM_BE_BREAKPOINT
;
8872 static const gdb_byte arm_default_thumb_le_breakpoint
[] = THUMB_LE_BREAKPOINT
;
8873 static const gdb_byte arm_default_thumb_be_breakpoint
[] = THUMB_BE_BREAKPOINT
;
8875 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8876 the program counter value to determine whether a 16-bit or 32-bit
8877 breakpoint should be used. It returns a pointer to a string of
8878 bytes that encode a breakpoint instruction, stores the length of
8879 the string to *lenptr, and adjusts the program counter (if
8880 necessary) to point to the actual memory location where the
8881 breakpoint should be inserted. */
8883 static const unsigned char *
8884 arm_breakpoint_from_pc (struct gdbarch
*gdbarch
, CORE_ADDR
*pcptr
, int *lenptr
)
8886 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8887 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8889 if (arm_pc_is_thumb (gdbarch
, *pcptr
))
8891 *pcptr
= UNMAKE_THUMB_ADDR (*pcptr
);
8893 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8894 check whether we are replacing a 32-bit instruction. */
8895 if (tdep
->thumb2_breakpoint
!= NULL
)
8898 if (target_read_memory (*pcptr
, buf
, 2) == 0)
8900 unsigned short inst1
;
8901 inst1
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
8902 if (thumb_insn_size (inst1
) == 4)
8904 *lenptr
= tdep
->thumb2_breakpoint_size
;
8905 return tdep
->thumb2_breakpoint
;
8910 *lenptr
= tdep
->thumb_breakpoint_size
;
8911 return tdep
->thumb_breakpoint
;
8915 *lenptr
= tdep
->arm_breakpoint_size
;
8916 return tdep
->arm_breakpoint
;
8921 arm_remote_breakpoint_from_pc (struct gdbarch
*gdbarch
, CORE_ADDR
*pcptr
,
8924 arm_breakpoint_from_pc (gdbarch
, pcptr
, kindptr
);
8926 if (arm_pc_is_thumb (gdbarch
, *pcptr
) && *kindptr
== 4)
8927 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8928 that this is not confused with a 32-bit ARM breakpoint. */
8932 /* Extract from an array REGBUF containing the (raw) register state a
8933 function return value of type TYPE, and copy that, in virtual
8934 format, into VALBUF. */
8937 arm_extract_return_value (struct type
*type
, struct regcache
*regs
,
8940 struct gdbarch
*gdbarch
= get_regcache_arch (regs
);
8941 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8943 if (TYPE_CODE_FLT
== TYPE_CODE (type
))
8945 switch (gdbarch_tdep (gdbarch
)->fp_model
)
8949 /* The value is in register F0 in internal format. We need to
8950 extract the raw value and then convert it to the desired
8952 bfd_byte tmpbuf
[FP_REGISTER_SIZE
];
8954 regcache_cooked_read (regs
, ARM_F0_REGNUM
, tmpbuf
);
8955 convert_from_extended (floatformat_from_type (type
), tmpbuf
,
8956 valbuf
, gdbarch_byte_order (gdbarch
));
8960 case ARM_FLOAT_SOFT_FPA
:
8961 case ARM_FLOAT_SOFT_VFP
:
8962 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8963 not using the VFP ABI code. */
8965 regcache_cooked_read (regs
, ARM_A1_REGNUM
, valbuf
);
8966 if (TYPE_LENGTH (type
) > 4)
8967 regcache_cooked_read (regs
, ARM_A1_REGNUM
+ 1,
8968 valbuf
+ INT_REGISTER_SIZE
);
8972 internal_error (__FILE__
, __LINE__
,
8973 _("arm_extract_return_value: "
8974 "Floating point model not supported"));
8978 else if (TYPE_CODE (type
) == TYPE_CODE_INT
8979 || TYPE_CODE (type
) == TYPE_CODE_CHAR
8980 || TYPE_CODE (type
) == TYPE_CODE_BOOL
8981 || TYPE_CODE (type
) == TYPE_CODE_PTR
8982 || TYPE_CODE (type
) == TYPE_CODE_REF
8983 || TYPE_CODE (type
) == TYPE_CODE_ENUM
)
8985 /* If the type is a plain integer, then the access is
8986 straight-forward. Otherwise we have to play around a bit
8988 int len
= TYPE_LENGTH (type
);
8989 int regno
= ARM_A1_REGNUM
;
8994 /* By using store_unsigned_integer we avoid having to do
8995 anything special for small big-endian values. */
8996 regcache_cooked_read_unsigned (regs
, regno
++, &tmp
);
8997 store_unsigned_integer (valbuf
,
8998 (len
> INT_REGISTER_SIZE
8999 ? INT_REGISTER_SIZE
: len
),
9001 len
-= INT_REGISTER_SIZE
;
9002 valbuf
+= INT_REGISTER_SIZE
;
9007 /* For a structure or union the behaviour is as if the value had
9008 been stored to word-aligned memory and then loaded into
9009 registers with 32-bit load instruction(s). */
9010 int len
= TYPE_LENGTH (type
);
9011 int regno
= ARM_A1_REGNUM
;
9012 bfd_byte tmpbuf
[INT_REGISTER_SIZE
];
9016 regcache_cooked_read (regs
, regno
++, tmpbuf
);
9017 memcpy (valbuf
, tmpbuf
,
9018 len
> INT_REGISTER_SIZE
? INT_REGISTER_SIZE
: len
);
9019 len
-= INT_REGISTER_SIZE
;
9020 valbuf
+= INT_REGISTER_SIZE
;
9026 /* Will a function return an aggregate type in memory or in a
9027 register? Return 0 if an aggregate type can be returned in a
9028 register, 1 if it must be returned in memory. */
9031 arm_return_in_memory (struct gdbarch
*gdbarch
, struct type
*type
)
9034 enum type_code code
;
9036 CHECK_TYPEDEF (type
);
9038 /* In the ARM ABI, "integer" like aggregate types are returned in
9039 registers. For an aggregate type to be integer like, its size
9040 must be less than or equal to INT_REGISTER_SIZE and the
9041 offset of each addressable subfield must be zero. Note that bit
9042 fields are not addressable, and all addressable subfields of
9043 unions always start at offset zero.
9045 This function is based on the behaviour of GCC 2.95.1.
9046 See: gcc/arm.c: arm_return_in_memory() for details.
9048 Note: All versions of GCC before GCC 2.95.2 do not set up the
9049 parameters correctly for a function returning the following
9050 structure: struct { float f;}; This should be returned in memory,
9051 not a register. Richard Earnshaw sent me a patch, but I do not
9052 know of any way to detect if a function like the above has been
9053 compiled with the correct calling convention. */
9055 /* All aggregate types that won't fit in a register must be returned
9057 if (TYPE_LENGTH (type
) > INT_REGISTER_SIZE
)
9062 /* The AAPCS says all aggregates not larger than a word are returned
9064 if (gdbarch_tdep (gdbarch
)->arm_abi
!= ARM_ABI_APCS
)
9067 /* The only aggregate types that can be returned in a register are
9068 structs and unions. Arrays must be returned in memory. */
9069 code
= TYPE_CODE (type
);
9070 if ((TYPE_CODE_STRUCT
!= code
) && (TYPE_CODE_UNION
!= code
))
9075 /* Assume all other aggregate types can be returned in a register.
9076 Run a check for structures, unions and arrays. */
9079 if ((TYPE_CODE_STRUCT
== code
) || (TYPE_CODE_UNION
== code
))
9082 /* Need to check if this struct/union is "integer" like. For
9083 this to be true, its size must be less than or equal to
9084 INT_REGISTER_SIZE and the offset of each addressable
9085 subfield must be zero. Note that bit fields are not
9086 addressable, and unions always start at offset zero. If any
9087 of the subfields is a floating point type, the struct/union
9088 cannot be an integer type. */
9090 /* For each field in the object, check:
9091 1) Is it FP? --> yes, nRc = 1;
9092 2) Is it addressable (bitpos != 0) and
9093 not packed (bitsize == 0)?
9097 for (i
= 0; i
< TYPE_NFIELDS (type
); i
++)
9099 enum type_code field_type_code
;
9100 field_type_code
= TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type
,
9103 /* Is it a floating point type field? */
9104 if (field_type_code
== TYPE_CODE_FLT
)
9110 /* If bitpos != 0, then we have to care about it. */
9111 if (TYPE_FIELD_BITPOS (type
, i
) != 0)
9113 /* Bitfields are not addressable. If the field bitsize is
9114 zero, then the field is not packed. Hence it cannot be
9115 a bitfield or any other packed type. */
9116 if (TYPE_FIELD_BITSIZE (type
, i
) == 0)
9128 /* Write into appropriate registers a function return value of type
9129 TYPE, given in virtual format. */
9132 arm_store_return_value (struct type
*type
, struct regcache
*regs
,
9133 const gdb_byte
*valbuf
)
9135 struct gdbarch
*gdbarch
= get_regcache_arch (regs
);
9136 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
9138 if (TYPE_CODE (type
) == TYPE_CODE_FLT
)
9140 gdb_byte buf
[MAX_REGISTER_SIZE
];
9142 switch (gdbarch_tdep (gdbarch
)->fp_model
)
9146 convert_to_extended (floatformat_from_type (type
), buf
, valbuf
,
9147 gdbarch_byte_order (gdbarch
));
9148 regcache_cooked_write (regs
, ARM_F0_REGNUM
, buf
);
9151 case ARM_FLOAT_SOFT_FPA
:
9152 case ARM_FLOAT_SOFT_VFP
:
9153 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9154 not using the VFP ABI code. */
9156 regcache_cooked_write (regs
, ARM_A1_REGNUM
, valbuf
);
9157 if (TYPE_LENGTH (type
) > 4)
9158 regcache_cooked_write (regs
, ARM_A1_REGNUM
+ 1,
9159 valbuf
+ INT_REGISTER_SIZE
);
9163 internal_error (__FILE__
, __LINE__
,
9164 _("arm_store_return_value: Floating "
9165 "point model not supported"));
9169 else if (TYPE_CODE (type
) == TYPE_CODE_INT
9170 || TYPE_CODE (type
) == TYPE_CODE_CHAR
9171 || TYPE_CODE (type
) == TYPE_CODE_BOOL
9172 || TYPE_CODE (type
) == TYPE_CODE_PTR
9173 || TYPE_CODE (type
) == TYPE_CODE_REF
9174 || TYPE_CODE (type
) == TYPE_CODE_ENUM
)
9176 if (TYPE_LENGTH (type
) <= 4)
9178 /* Values of one word or less are zero/sign-extended and
9180 bfd_byte tmpbuf
[INT_REGISTER_SIZE
];
9181 LONGEST val
= unpack_long (type
, valbuf
);
9183 store_signed_integer (tmpbuf
, INT_REGISTER_SIZE
, byte_order
, val
);
9184 regcache_cooked_write (regs
, ARM_A1_REGNUM
, tmpbuf
);
9188 /* Integral values greater than one word are stored in consecutive
9189 registers starting with r0. This will always be a multiple of
9190 the regiser size. */
9191 int len
= TYPE_LENGTH (type
);
9192 int regno
= ARM_A1_REGNUM
;
9196 regcache_cooked_write (regs
, regno
++, valbuf
);
9197 len
-= INT_REGISTER_SIZE
;
9198 valbuf
+= INT_REGISTER_SIZE
;
9204 /* For a structure or union the behaviour is as if the value had
9205 been stored to word-aligned memory and then loaded into
9206 registers with 32-bit load instruction(s). */
9207 int len
= TYPE_LENGTH (type
);
9208 int regno
= ARM_A1_REGNUM
;
9209 bfd_byte tmpbuf
[INT_REGISTER_SIZE
];
9213 memcpy (tmpbuf
, valbuf
,
9214 len
> INT_REGISTER_SIZE
? INT_REGISTER_SIZE
: len
);
9215 regcache_cooked_write (regs
, regno
++, tmpbuf
);
9216 len
-= INT_REGISTER_SIZE
;
9217 valbuf
+= INT_REGISTER_SIZE
;
9223 /* Handle function return values. */
9225 static enum return_value_convention
9226 arm_return_value (struct gdbarch
*gdbarch
, struct value
*function
,
9227 struct type
*valtype
, struct regcache
*regcache
,
9228 gdb_byte
*readbuf
, const gdb_byte
*writebuf
)
9230 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
9231 struct type
*func_type
= function
? value_type (function
) : NULL
;
9232 enum arm_vfp_cprc_base_type vfp_base_type
;
9235 if (arm_vfp_abi_for_function (gdbarch
, func_type
)
9236 && arm_vfp_call_candidate (valtype
, &vfp_base_type
, &vfp_base_count
))
9238 int reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
9239 int unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
9241 for (i
= 0; i
< vfp_base_count
; i
++)
9243 if (reg_char
== 'q')
9246 arm_neon_quad_write (gdbarch
, regcache
, i
,
9247 writebuf
+ i
* unit_length
);
9250 arm_neon_quad_read (gdbarch
, regcache
, i
,
9251 readbuf
+ i
* unit_length
);
9258 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d", reg_char
, i
);
9259 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9262 regcache_cooked_write (regcache
, regnum
,
9263 writebuf
+ i
* unit_length
);
9265 regcache_cooked_read (regcache
, regnum
,
9266 readbuf
+ i
* unit_length
);
9269 return RETURN_VALUE_REGISTER_CONVENTION
;
9272 if (TYPE_CODE (valtype
) == TYPE_CODE_STRUCT
9273 || TYPE_CODE (valtype
) == TYPE_CODE_UNION
9274 || TYPE_CODE (valtype
) == TYPE_CODE_ARRAY
)
9276 if (tdep
->struct_return
== pcc_struct_return
9277 || arm_return_in_memory (gdbarch
, valtype
))
9278 return RETURN_VALUE_STRUCT_CONVENTION
;
9281 /* AAPCS returns complex types longer than a register in memory. */
9282 if (tdep
->arm_abi
!= ARM_ABI_APCS
9283 && TYPE_CODE (valtype
) == TYPE_CODE_COMPLEX
9284 && TYPE_LENGTH (valtype
) > INT_REGISTER_SIZE
)
9285 return RETURN_VALUE_STRUCT_CONVENTION
;
9288 arm_store_return_value (valtype
, regcache
, writebuf
);
9291 arm_extract_return_value (valtype
, regcache
, readbuf
);
9293 return RETURN_VALUE_REGISTER_CONVENTION
;
9298 arm_get_longjmp_target (struct frame_info
*frame
, CORE_ADDR
*pc
)
9300 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
9301 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
9302 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
9304 gdb_byte buf
[INT_REGISTER_SIZE
];
9306 jb_addr
= get_frame_register_unsigned (frame
, ARM_A1_REGNUM
);
9308 if (target_read_memory (jb_addr
+ tdep
->jb_pc
* tdep
->jb_elt_size
, buf
,
9312 *pc
= extract_unsigned_integer (buf
, INT_REGISTER_SIZE
, byte_order
);
9316 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9317 return the target PC. Otherwise return 0. */
9320 arm_skip_stub (struct frame_info
*frame
, CORE_ADDR pc
)
9324 CORE_ADDR start_addr
;
9326 /* Find the starting address and name of the function containing the PC. */
9327 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0)
9329 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9331 start_addr
= arm_skip_bx_reg (frame
, pc
);
9332 if (start_addr
!= 0)
9338 /* If PC is in a Thumb call or return stub, return the address of the
9339 target PC, which is in a register. The thunk functions are called
9340 _call_via_xx, where x is the register name. The possible names
9341 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9342 functions, named __ARM_call_via_r[0-7]. */
9343 if (strncmp (name
, "_call_via_", 10) == 0
9344 || strncmp (name
, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
9346 /* Use the name suffix to determine which register contains the
9348 static char *table
[15] =
9349 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9350 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9353 int offset
= strlen (name
) - 2;
9355 for (regno
= 0; regno
<= 14; regno
++)
9356 if (strcmp (&name
[offset
], table
[regno
]) == 0)
9357 return get_frame_register_unsigned (frame
, regno
);
9360 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9361 non-interworking calls to foo. We could decode the stubs
9362 to find the target but it's easier to use the symbol table. */
9363 namelen
= strlen (name
);
9364 if (name
[0] == '_' && name
[1] == '_'
9365 && ((namelen
> 2 + strlen ("_from_thumb")
9366 && strncmp (name
+ namelen
- strlen ("_from_thumb"), "_from_thumb",
9367 strlen ("_from_thumb")) == 0)
9368 || (namelen
> 2 + strlen ("_from_arm")
9369 && strncmp (name
+ namelen
- strlen ("_from_arm"), "_from_arm",
9370 strlen ("_from_arm")) == 0)))
9373 int target_len
= namelen
- 2;
9374 struct bound_minimal_symbol minsym
;
9375 struct objfile
*objfile
;
9376 struct obj_section
*sec
;
9378 if (name
[namelen
- 1] == 'b')
9379 target_len
-= strlen ("_from_thumb");
9381 target_len
-= strlen ("_from_arm");
9383 target_name
= alloca (target_len
+ 1);
9384 memcpy (target_name
, name
+ 2, target_len
);
9385 target_name
[target_len
] = '\0';
9387 sec
= find_pc_section (pc
);
9388 objfile
= (sec
== NULL
) ? NULL
: sec
->objfile
;
9389 minsym
= lookup_minimal_symbol (target_name
, NULL
, objfile
);
9390 if (minsym
.minsym
!= NULL
)
9391 return BMSYMBOL_VALUE_ADDRESS (minsym
);
9396 return 0; /* not a stub */
9400 set_arm_command (char *args
, int from_tty
)
9402 printf_unfiltered (_("\
9403 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9404 help_list (setarmcmdlist
, "set arm ", all_commands
, gdb_stdout
);
9408 show_arm_command (char *args
, int from_tty
)
9410 cmd_show_list (showarmcmdlist
, from_tty
, "");
9414 arm_update_current_architecture (void)
9416 struct gdbarch_info info
;
9418 /* If the current architecture is not ARM, we have nothing to do. */
9419 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch
!= bfd_arch_arm
)
9422 /* Update the architecture. */
9423 gdbarch_info_init (&info
);
9425 if (!gdbarch_update_p (info
))
9426 internal_error (__FILE__
, __LINE__
, _("could not update architecture"));
9430 set_fp_model_sfunc (char *args
, int from_tty
,
9431 struct cmd_list_element
*c
)
9433 enum arm_float_model fp_model
;
9435 for (fp_model
= ARM_FLOAT_AUTO
; fp_model
!= ARM_FLOAT_LAST
; fp_model
++)
9436 if (strcmp (current_fp_model
, fp_model_strings
[fp_model
]) == 0)
9438 arm_fp_model
= fp_model
;
9442 if (fp_model
== ARM_FLOAT_LAST
)
9443 internal_error (__FILE__
, __LINE__
, _("Invalid fp model accepted: %s."),
9446 arm_update_current_architecture ();
9450 show_fp_model (struct ui_file
*file
, int from_tty
,
9451 struct cmd_list_element
*c
, const char *value
)
9453 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
9455 if (arm_fp_model
== ARM_FLOAT_AUTO
9456 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
9457 fprintf_filtered (file
, _("\
9458 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9459 fp_model_strings
[tdep
->fp_model
]);
9461 fprintf_filtered (file
, _("\
9462 The current ARM floating point model is \"%s\".\n"),
9463 fp_model_strings
[arm_fp_model
]);
9467 arm_set_abi (char *args
, int from_tty
,
9468 struct cmd_list_element
*c
)
9470 enum arm_abi_kind arm_abi
;
9472 for (arm_abi
= ARM_ABI_AUTO
; arm_abi
!= ARM_ABI_LAST
; arm_abi
++)
9473 if (strcmp (arm_abi_string
, arm_abi_strings
[arm_abi
]) == 0)
9475 arm_abi_global
= arm_abi
;
9479 if (arm_abi
== ARM_ABI_LAST
)
9480 internal_error (__FILE__
, __LINE__
, _("Invalid ABI accepted: %s."),
9483 arm_update_current_architecture ();
9487 arm_show_abi (struct ui_file
*file
, int from_tty
,
9488 struct cmd_list_element
*c
, const char *value
)
9490 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
9492 if (arm_abi_global
== ARM_ABI_AUTO
9493 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
9494 fprintf_filtered (file
, _("\
9495 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9496 arm_abi_strings
[tdep
->arm_abi
]);
9498 fprintf_filtered (file
, _("The current ARM ABI is \"%s\".\n"),
9503 arm_show_fallback_mode (struct ui_file
*file
, int from_tty
,
9504 struct cmd_list_element
*c
, const char *value
)
9506 fprintf_filtered (file
,
9507 _("The current execution mode assumed "
9508 "(when symbols are unavailable) is \"%s\".\n"),
9509 arm_fallback_mode_string
);
9513 arm_show_force_mode (struct ui_file
*file
, int from_tty
,
9514 struct cmd_list_element
*c
, const char *value
)
9516 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
9518 fprintf_filtered (file
,
9519 _("The current execution mode assumed "
9520 "(even when symbols are available) is \"%s\".\n"),
9521 arm_force_mode_string
);
/* If the user changes the register disassembly style used for info
   register and other commands, we have to also switch the style used
   in opcodes for disassembly output.  This function is run in the "set
   arm disassembly" command, and does that.  */

static void
set_disassembly_style_sfunc (char *args, int from_tty,
			     struct cmd_list_element *c)
{
  set_disassembly_style ();
}
9536 /* Return the ARM register name corresponding to register I. */
9538 arm_register_name (struct gdbarch
*gdbarch
, int i
)
9540 const int num_regs
= gdbarch_num_regs (gdbarch
);
9542 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
9543 && i
>= num_regs
&& i
< num_regs
+ 32)
9545 static const char *const vfp_pseudo_names
[] = {
9546 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9547 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9548 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9549 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9552 return vfp_pseudo_names
[i
- num_regs
];
9555 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
9556 && i
>= num_regs
+ 32 && i
< num_regs
+ 32 + 16)
9558 static const char *const neon_pseudo_names
[] = {
9559 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9560 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9563 return neon_pseudo_names
[i
- num_regs
- 32];
9566 if (i
>= ARRAY_SIZE (arm_register_names
))
9567 /* These registers are only supported on targets which supply
9568 an XML description. */
9571 return arm_register_names
[i
];
9575 set_disassembly_style (void)
9579 /* Find the style that the user wants. */
9580 for (current
= 0; current
< num_disassembly_options
; current
++)
9581 if (disassembly_style
== valid_disassembly_styles
[current
])
9583 gdb_assert (current
< num_disassembly_options
);
9585 /* Synchronize the disassembler. */
9586 set_arm_regname_option (current
);
9589 /* Test whether the coff symbol specific value corresponds to a Thumb
9593 coff_sym_is_thumb (int val
)
9595 return (val
== C_THUMBEXT
9596 || val
== C_THUMBSTAT
9597 || val
== C_THUMBEXTFUNC
9598 || val
== C_THUMBSTATFUNC
9599 || val
== C_THUMBLABEL
);
9602 /* arm_coff_make_msymbol_special()
9603 arm_elf_make_msymbol_special()
9605 These functions test whether the COFF or ELF symbol corresponds to
9606 an address in thumb code, and set a "special" bit in a minimal
9607 symbol to indicate that it does. */
9610 arm_elf_make_msymbol_special(asymbol
*sym
, struct minimal_symbol
*msym
)
9612 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type
*)sym
)->internal_elf_sym
)
9613 == ST_BRANCH_TO_THUMB
)
9614 MSYMBOL_SET_SPECIAL (msym
);
/* Mark MSYM as Thumb if the COFF symbol value VAL denotes a Thumb
   symbol class.  */

static void
arm_coff_make_msymbol_special (int val, struct minimal_symbol *msym)
{
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
9625 arm_objfile_data_free (struct objfile
*objfile
, void *arg
)
9627 struct arm_per_objfile
*data
= arg
;
9630 for (i
= 0; i
< objfile
->obfd
->section_count
; i
++)
9631 VEC_free (arm_mapping_symbol_s
, data
->section_maps
[i
]);
9635 arm_record_special_symbol (struct gdbarch
*gdbarch
, struct objfile
*objfile
,
9638 const char *name
= bfd_asymbol_name (sym
);
9639 struct arm_per_objfile
*data
;
9640 VEC(arm_mapping_symbol_s
) **map_p
;
9641 struct arm_mapping_symbol new_map_sym
;
9643 gdb_assert (name
[0] == '$');
9644 if (name
[1] != 'a' && name
[1] != 't' && name
[1] != 'd')
9647 data
= objfile_data (objfile
, arm_objfile_data_key
);
9650 data
= OBSTACK_ZALLOC (&objfile
->objfile_obstack
,
9651 struct arm_per_objfile
);
9652 set_objfile_data (objfile
, arm_objfile_data_key
, data
);
9653 data
->section_maps
= OBSTACK_CALLOC (&objfile
->objfile_obstack
,
9654 objfile
->obfd
->section_count
,
9655 VEC(arm_mapping_symbol_s
) *);
9657 map_p
= &data
->section_maps
[bfd_get_section (sym
)->index
];
9659 new_map_sym
.value
= sym
->value
;
9660 new_map_sym
.type
= name
[1];
9662 /* Assume that most mapping symbols appear in order of increasing
9663 value. If they were randomly distributed, it would be faster to
9664 always push here and then sort at first use. */
9665 if (!VEC_empty (arm_mapping_symbol_s
, *map_p
))
9667 struct arm_mapping_symbol
*prev_map_sym
;
9669 prev_map_sym
= VEC_last (arm_mapping_symbol_s
, *map_p
);
9670 if (prev_map_sym
->value
>= sym
->value
)
9673 idx
= VEC_lower_bound (arm_mapping_symbol_s
, *map_p
, &new_map_sym
,
9674 arm_compare_mapping_symbols
);
9675 VEC_safe_insert (arm_mapping_symbol_s
, *map_p
, idx
, &new_map_sym
);
9680 VEC_safe_push (arm_mapping_symbol_s
, *map_p
, &new_map_sym
);
9684 arm_write_pc (struct regcache
*regcache
, CORE_ADDR pc
)
9686 struct gdbarch
*gdbarch
= get_regcache_arch (regcache
);
9687 regcache_cooked_write_unsigned (regcache
, ARM_PC_REGNUM
, pc
);
9689 /* If necessary, set the T bit. */
9692 ULONGEST val
, t_bit
;
9693 regcache_cooked_read_unsigned (regcache
, ARM_PS_REGNUM
, &val
);
9694 t_bit
= arm_psr_thumb_bit (gdbarch
);
9695 if (arm_pc_is_thumb (gdbarch
, pc
))
9696 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
9699 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
9704 /* Read the contents of a NEON quad register, by reading from two
9705 double registers. This is used to implement the quad pseudo
9706 registers, and for argument passing in case the quad registers are
9707 missing; vectors are passed in quad registers when using the VFP
9708 ABI, even if a NEON unit is not present. REGNUM is the index of
9709 the quad register, in [0, 15]. */
9711 static enum register_status
9712 arm_neon_quad_read (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9713 int regnum
, gdb_byte
*buf
)
9716 gdb_byte reg_buf
[8];
9717 int offset
, double_regnum
;
9718 enum register_status status
;
9720 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
9721 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9724 /* d0 is always the least significant half of q0. */
9725 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9730 status
= regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9731 if (status
!= REG_VALID
)
9733 memcpy (buf
+ offset
, reg_buf
, 8);
9735 offset
= 8 - offset
;
9736 status
= regcache_raw_read (regcache
, double_regnum
+ 1, reg_buf
);
9737 if (status
!= REG_VALID
)
9739 memcpy (buf
+ offset
, reg_buf
, 8);
9744 static enum register_status
9745 arm_pseudo_read (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9746 int regnum
, gdb_byte
*buf
)
9748 const int num_regs
= gdbarch_num_regs (gdbarch
);
9750 gdb_byte reg_buf
[8];
9751 int offset
, double_regnum
;
9753 gdb_assert (regnum
>= num_regs
);
9756 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
9757 /* Quad-precision register. */
9758 return arm_neon_quad_read (gdbarch
, regcache
, regnum
- 32, buf
);
9761 enum register_status status
;
9763 /* Single-precision register. */
9764 gdb_assert (regnum
< 32);
9766 /* s0 is always the least significant half of d0. */
9767 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9768 offset
= (regnum
& 1) ? 0 : 4;
9770 offset
= (regnum
& 1) ? 4 : 0;
9772 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
9773 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9776 status
= regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9777 if (status
== REG_VALID
)
9778 memcpy (buf
, reg_buf
+ offset
, 4);
9783 /* Store the contents of BUF to a NEON quad register, by writing to
9784 two double registers. This is used to implement the quad pseudo
9785 registers, and for argument passing in case the quad registers are
9786 missing; vectors are passed in quad registers when using the VFP
9787 ABI, even if a NEON unit is not present. REGNUM is the index
9788 of the quad register, in [0, 15]. */
9791 arm_neon_quad_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9792 int regnum
, const gdb_byte
*buf
)
9795 int offset
, double_regnum
;
9797 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
9798 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9801 /* d0 is always the least significant half of q0. */
9802 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9807 regcache_raw_write (regcache
, double_regnum
, buf
+ offset
);
9808 offset
= 8 - offset
;
9809 regcache_raw_write (regcache
, double_regnum
+ 1, buf
+ offset
);
9813 arm_pseudo_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9814 int regnum
, const gdb_byte
*buf
)
9816 const int num_regs
= gdbarch_num_regs (gdbarch
);
9818 gdb_byte reg_buf
[8];
9819 int offset
, double_regnum
;
9821 gdb_assert (regnum
>= num_regs
);
9824 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
9825 /* Quad-precision register. */
9826 arm_neon_quad_write (gdbarch
, regcache
, regnum
- 32, buf
);
9829 /* Single-precision register. */
9830 gdb_assert (regnum
< 32);
9832 /* s0 is always the least significant half of d0. */
9833 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9834 offset
= (regnum
& 1) ? 0 : 4;
9836 offset
= (regnum
& 1) ? 4 : 0;
9838 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
9839 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9842 regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9843 memcpy (reg_buf
+ offset
, buf
, 4);
9844 regcache_raw_write (regcache
, double_regnum
, reg_buf
);
/* Read a user-register alias: BATON points at the raw register number
   the alias maps to.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  const int *reg_p = baton;

  return value_of_register (*reg_p, frame);
}
9855 static enum gdb_osabi
9856 arm_elf_osabi_sniffer (bfd
*abfd
)
9858 unsigned int elfosabi
;
9859 enum gdb_osabi osabi
= GDB_OSABI_UNKNOWN
;
9861 elfosabi
= elf_elfheader (abfd
)->e_ident
[EI_OSABI
];
9863 if (elfosabi
== ELFOSABI_ARM
)
9864 /* GNU tools use this value. Check note sections in this case,
9866 bfd_map_over_sections (abfd
,
9867 generic_elf_osabi_sniff_abi_tag_sections
,
9870 /* Anything else will be handled by the generic ELF sniffer. */
9875 arm_register_reggroup_p (struct gdbarch
*gdbarch
, int regnum
,
9876 struct reggroup
*group
)
9878 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9879 this, FPS register belongs to save_regroup, restore_reggroup, and
9880 all_reggroup, of course. */
9881 if (regnum
== ARM_FPS_REGNUM
)
9882 return (group
== float_reggroup
9883 || group
== save_reggroup
9884 || group
== restore_reggroup
9885 || group
== all_reggroup
);
9887 return default_register_reggroup_p (gdbarch
, regnum
, group
);
9891 /* For backward-compatibility we allow two 'g' packet lengths with
9892 the remote protocol depending on whether FPA registers are
9893 supplied. M-profile targets do not have FPA registers, but some
9894 stubs already exist in the wild which use a 'g' packet which
9895 supplies them albeit with dummy values. The packet format which
9896 includes FPA registers should be considered deprecated for
9897 M-profile targets. */
9900 arm_register_g_packet_guesses (struct gdbarch
*gdbarch
)
9902 if (gdbarch_tdep (gdbarch
)->is_m
)
9904 /* If we know from the executable this is an M-profile target,
9905 cater for remote targets whose register set layout is the
9906 same as the FPA layout. */
9907 register_remote_g_packet_guess (gdbarch
,
9908 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9909 (16 * INT_REGISTER_SIZE
)
9910 + (8 * FP_REGISTER_SIZE
)
9911 + (2 * INT_REGISTER_SIZE
),
9912 tdesc_arm_with_m_fpa_layout
);
9914 /* The regular M-profile layout. */
9915 register_remote_g_packet_guess (gdbarch
,
9916 /* r0-r12,sp,lr,pc; xpsr */
9917 (16 * INT_REGISTER_SIZE
)
9918 + INT_REGISTER_SIZE
,
9921 /* M-profile plus M4F VFP. */
9922 register_remote_g_packet_guess (gdbarch
,
9923 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9924 (16 * INT_REGISTER_SIZE
)
9925 + (16 * VFP_REGISTER_SIZE
)
9926 + (2 * INT_REGISTER_SIZE
),
9927 tdesc_arm_with_m_vfp_d16
);
9930 /* Otherwise we don't have a useful guess. */
9934 /* Initialize the current architecture based on INFO. If possible,
9935 re-use an architecture from ARCHES, which is a list of
9936 architectures already created during this debugging session.
9938 Called e.g. at program startup, when reading a core file, and when
9939 reading a binary file. */
9941 static struct gdbarch
*
9942 arm_gdbarch_init (struct gdbarch_info info
, struct gdbarch_list
*arches
)
9944 struct gdbarch_tdep
*tdep
;
9945 struct gdbarch
*gdbarch
;
9946 struct gdbarch_list
*best_arch
;
9947 enum arm_abi_kind arm_abi
= arm_abi_global
;
9948 enum arm_float_model fp_model
= arm_fp_model
;
9949 struct tdesc_arch_data
*tdesc_data
= NULL
;
9951 int have_vfp_registers
= 0, have_vfp_pseudos
= 0, have_neon_pseudos
= 0;
9953 int have_fpa_registers
= 1;
9954 const struct target_desc
*tdesc
= info
.target_desc
;
9956 /* If we have an object to base this architecture on, try to determine
9959 if (arm_abi
== ARM_ABI_AUTO
&& info
.abfd
!= NULL
)
9961 int ei_osabi
, e_flags
;
9963 switch (bfd_get_flavour (info
.abfd
))
9965 case bfd_target_aout_flavour
:
9966 /* Assume it's an old APCS-style ABI. */
9967 arm_abi
= ARM_ABI_APCS
;
9970 case bfd_target_coff_flavour
:
9971 /* Assume it's an old APCS-style ABI. */
9973 arm_abi
= ARM_ABI_APCS
;
9976 case bfd_target_elf_flavour
:
9977 ei_osabi
= elf_elfheader (info
.abfd
)->e_ident
[EI_OSABI
];
9978 e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
9980 if (ei_osabi
== ELFOSABI_ARM
)
9982 /* GNU tools used to use this value, but do not for EABI
9983 objects. There's nowhere to tag an EABI version
9984 anyway, so assume APCS. */
9985 arm_abi
= ARM_ABI_APCS
;
9987 else if (ei_osabi
== ELFOSABI_NONE
)
9989 int eabi_ver
= EF_ARM_EABI_VERSION (e_flags
);
9990 int attr_arch
, attr_profile
;
9994 case EF_ARM_EABI_UNKNOWN
:
9995 /* Assume GNU tools. */
9996 arm_abi
= ARM_ABI_APCS
;
9999 case EF_ARM_EABI_VER4
:
10000 case EF_ARM_EABI_VER5
:
10001 arm_abi
= ARM_ABI_AAPCS
;
10002 /* EABI binaries default to VFP float ordering.
10003 They may also contain build attributes that can
10004 be used to identify if the VFP argument-passing
10006 if (fp_model
== ARM_FLOAT_AUTO
)
10009 switch (bfd_elf_get_obj_attr_int (info
.abfd
,
10014 /* "The user intended FP parameter/result
10015 passing to conform to AAPCS, base
10017 fp_model
= ARM_FLOAT_SOFT_VFP
;
10020 /* "The user intended FP parameter/result
10021 passing to conform to AAPCS, VFP
10023 fp_model
= ARM_FLOAT_VFP
;
10026 /* "The user intended FP parameter/result
10027 passing to conform to tool chain-specific
10028 conventions" - we don't know any such
10029 conventions, so leave it as "auto". */
10032 /* Attribute value not mentioned in the
10033 October 2008 ABI, so leave it as
10038 fp_model
= ARM_FLOAT_SOFT_VFP
;
10044 /* Leave it as "auto". */
10045 warning (_("unknown ARM EABI version 0x%x"), eabi_ver
);
10050 /* Detect M-profile programs. This only works if the
10051 executable file includes build attributes; GCC does
10052 copy them to the executable, but e.g. RealView does
10054 attr_arch
= bfd_elf_get_obj_attr_int (info
.abfd
, OBJ_ATTR_PROC
,
10056 attr_profile
= bfd_elf_get_obj_attr_int (info
.abfd
,
10058 Tag_CPU_arch_profile
);
10059 /* GCC specifies the profile for v6-M; RealView only
10060 specifies the profile for architectures starting with
10061 V7 (as opposed to architectures with a tag
10062 numerically greater than TAG_CPU_ARCH_V7). */
10063 if (!tdesc_has_registers (tdesc
)
10064 && (attr_arch
== TAG_CPU_ARCH_V6_M
10065 || attr_arch
== TAG_CPU_ARCH_V6S_M
10066 || attr_profile
== 'M'))
10071 if (fp_model
== ARM_FLOAT_AUTO
)
10073 int e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
10075 switch (e_flags
& (EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
))
10078 /* Leave it as "auto". Strictly speaking this case
10079 means FPA, but almost nobody uses that now, and
10080 many toolchains fail to set the appropriate bits
10081 for the floating-point model they use. */
10083 case EF_ARM_SOFT_FLOAT
:
10084 fp_model
= ARM_FLOAT_SOFT_FPA
;
10086 case EF_ARM_VFP_FLOAT
:
10087 fp_model
= ARM_FLOAT_VFP
;
10089 case EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
:
10090 fp_model
= ARM_FLOAT_SOFT_VFP
;
10095 if (e_flags
& EF_ARM_BE8
)
10096 info
.byte_order_for_code
= BFD_ENDIAN_LITTLE
;
10101 /* Leave it as "auto". */
10106 /* Check any target description for validity. */
10107 if (tdesc_has_registers (tdesc
))
10109 /* For most registers we require GDB's default names; but also allow
10110 the numeric names for sp / lr / pc, as a convenience. */
10111 static const char *const arm_sp_names
[] = { "r13", "sp", NULL
};
10112 static const char *const arm_lr_names
[] = { "r14", "lr", NULL
};
10113 static const char *const arm_pc_names
[] = { "r15", "pc", NULL
};
10115 const struct tdesc_feature
*feature
;
10118 feature
= tdesc_find_feature (tdesc
,
10119 "org.gnu.gdb.arm.core");
10120 if (feature
== NULL
)
10122 feature
= tdesc_find_feature (tdesc
,
10123 "org.gnu.gdb.arm.m-profile");
10124 if (feature
== NULL
)
10130 tdesc_data
= tdesc_data_alloc ();
10133 for (i
= 0; i
< ARM_SP_REGNUM
; i
++)
10134 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
10135 arm_register_names
[i
]);
10136 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
10139 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
10142 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
10146 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10147 ARM_PS_REGNUM
, "xpsr");
10149 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10150 ARM_PS_REGNUM
, "cpsr");
10154 tdesc_data_cleanup (tdesc_data
);
10158 feature
= tdesc_find_feature (tdesc
,
10159 "org.gnu.gdb.arm.fpa");
10160 if (feature
!= NULL
)
10163 for (i
= ARM_F0_REGNUM
; i
<= ARM_FPS_REGNUM
; i
++)
10164 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
10165 arm_register_names
[i
]);
10168 tdesc_data_cleanup (tdesc_data
);
10173 have_fpa_registers
= 0;
10175 feature
= tdesc_find_feature (tdesc
,
10176 "org.gnu.gdb.xscale.iwmmxt");
10177 if (feature
!= NULL
)
10179 static const char *const iwmmxt_names
[] = {
10180 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10181 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10182 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10183 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10187 for (i
= ARM_WR0_REGNUM
; i
<= ARM_WR15_REGNUM
; i
++)
10189 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
10190 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
10192 /* Check for the control registers, but do not fail if they
10194 for (i
= ARM_WC0_REGNUM
; i
<= ARM_WCASF_REGNUM
; i
++)
10195 tdesc_numbered_register (feature
, tdesc_data
, i
,
10196 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
10198 for (i
= ARM_WCGR0_REGNUM
; i
<= ARM_WCGR3_REGNUM
; i
++)
10200 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
10201 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
10205 tdesc_data_cleanup (tdesc_data
);
10210 /* If we have a VFP unit, check whether the single precision registers
10211 are present. If not, then we will synthesize them as pseudo
10213 feature
= tdesc_find_feature (tdesc
,
10214 "org.gnu.gdb.arm.vfp");
10215 if (feature
!= NULL
)
10217 static const char *const vfp_double_names
[] = {
10218 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10219 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10220 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10221 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10224 /* Require the double precision registers. There must be either
10227 for (i
= 0; i
< 32; i
++)
10229 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10231 vfp_double_names
[i
]);
10235 if (!valid_p
&& i
== 16)
10238 /* Also require FPSCR. */
10239 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10240 ARM_FPSCR_REGNUM
, "fpscr");
10243 tdesc_data_cleanup (tdesc_data
);
10247 if (tdesc_unnumbered_register (feature
, "s0") == 0)
10248 have_vfp_pseudos
= 1;
10250 have_vfp_registers
= 1;
10252 /* If we have VFP, also check for NEON. The architecture allows
10253 NEON without VFP (integer vector operations only), but GDB
10254 does not support that. */
10255 feature
= tdesc_find_feature (tdesc
,
10256 "org.gnu.gdb.arm.neon");
10257 if (feature
!= NULL
)
10259 /* NEON requires 32 double-precision registers. */
10262 tdesc_data_cleanup (tdesc_data
);
10266 /* If there are quad registers defined by the stub, use
10267 their type; otherwise (normally) provide them with
10268 the default type. */
10269 if (tdesc_unnumbered_register (feature
, "q0") == 0)
10270 have_neon_pseudos
= 1;
10277 /* If there is already a candidate, use it. */
10278 for (best_arch
= gdbarch_list_lookup_by_info (arches
, &info
);
10280 best_arch
= gdbarch_list_lookup_by_info (best_arch
->next
, &info
))
10282 if (arm_abi
!= ARM_ABI_AUTO
10283 && arm_abi
!= gdbarch_tdep (best_arch
->gdbarch
)->arm_abi
)
10286 if (fp_model
!= ARM_FLOAT_AUTO
10287 && fp_model
!= gdbarch_tdep (best_arch
->gdbarch
)->fp_model
)
10290 /* There are various other properties in tdep that we do not
10291 need to check here: those derived from a target description,
10292 since gdbarches with a different target description are
10293 automatically disqualified. */
10295 /* Do check is_m, though, since it might come from the binary. */
10296 if (is_m
!= gdbarch_tdep (best_arch
->gdbarch
)->is_m
)
10299 /* Found a match. */
10303 if (best_arch
!= NULL
)
10305 if (tdesc_data
!= NULL
)
10306 tdesc_data_cleanup (tdesc_data
);
10307 return best_arch
->gdbarch
;
10310 tdep
= xcalloc (1, sizeof (struct gdbarch_tdep
));
10311 gdbarch
= gdbarch_alloc (&info
, tdep
);
10313 /* Record additional information about the architecture we are defining.
10314 These are gdbarch discriminators, like the OSABI. */
10315 tdep
->arm_abi
= arm_abi
;
10316 tdep
->fp_model
= fp_model
;
10318 tdep
->have_fpa_registers
= have_fpa_registers
;
10319 tdep
->have_vfp_registers
= have_vfp_registers
;
10320 tdep
->have_vfp_pseudos
= have_vfp_pseudos
;
10321 tdep
->have_neon_pseudos
= have_neon_pseudos
;
10322 tdep
->have_neon
= have_neon
;
10324 arm_register_g_packet_guesses (gdbarch
);
10327 switch (info
.byte_order_for_code
)
10329 case BFD_ENDIAN_BIG
:
10330 tdep
->arm_breakpoint
= arm_default_arm_be_breakpoint
;
10331 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_be_breakpoint
);
10332 tdep
->thumb_breakpoint
= arm_default_thumb_be_breakpoint
;
10333 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_be_breakpoint
);
10337 case BFD_ENDIAN_LITTLE
:
10338 tdep
->arm_breakpoint
= arm_default_arm_le_breakpoint
;
10339 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_le_breakpoint
);
10340 tdep
->thumb_breakpoint
= arm_default_thumb_le_breakpoint
;
10341 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_le_breakpoint
);
10346 internal_error (__FILE__
, __LINE__
,
10347 _("arm_gdbarch_init: bad byte order for float format"));
10350 /* On ARM targets char defaults to unsigned. */
10351 set_gdbarch_char_signed (gdbarch
, 0);
10353 /* Note: for displaced stepping, this includes the breakpoint, and one word
10354 of additional scratch space. This setting isn't used for anything beside
10355 displaced stepping at present. */
10356 set_gdbarch_max_insn_length (gdbarch
, 4 * DISPLACED_MODIFIED_INSNS
);
10358 /* This should be low enough for everything. */
10359 tdep
->lowest_pc
= 0x20;
10360 tdep
->jb_pc
= -1; /* Longjump support not enabled by default. */
10362 /* The default, for both APCS and AAPCS, is to return small
10363 structures in registers. */
10364 tdep
->struct_return
= reg_struct_return
;
10366 set_gdbarch_push_dummy_call (gdbarch
, arm_push_dummy_call
);
10367 set_gdbarch_frame_align (gdbarch
, arm_frame_align
);
10369 set_gdbarch_write_pc (gdbarch
, arm_write_pc
);
10371 /* Frame handling. */
10372 set_gdbarch_dummy_id (gdbarch
, arm_dummy_id
);
10373 set_gdbarch_unwind_pc (gdbarch
, arm_unwind_pc
);
10374 set_gdbarch_unwind_sp (gdbarch
, arm_unwind_sp
);
10376 frame_base_set_default (gdbarch
, &arm_normal_base
);
10378 /* Address manipulation. */
10379 set_gdbarch_addr_bits_remove (gdbarch
, arm_addr_bits_remove
);
10381 /* Advance PC across function entry code. */
10382 set_gdbarch_skip_prologue (gdbarch
, arm_skip_prologue
);
10384 /* Detect whether PC is in function epilogue. */
10385 set_gdbarch_in_function_epilogue_p (gdbarch
, arm_in_function_epilogue_p
);
10387 /* Skip trampolines. */
10388 set_gdbarch_skip_trampoline_code (gdbarch
, arm_skip_stub
);
10390 /* The stack grows downward. */
10391 set_gdbarch_inner_than (gdbarch
, core_addr_lessthan
);
10393 /* Breakpoint manipulation. */
10394 set_gdbarch_breakpoint_from_pc (gdbarch
, arm_breakpoint_from_pc
);
10395 set_gdbarch_remote_breakpoint_from_pc (gdbarch
,
10396 arm_remote_breakpoint_from_pc
);
10398 /* Information about registers, etc. */
10399 set_gdbarch_sp_regnum (gdbarch
, ARM_SP_REGNUM
);
10400 set_gdbarch_pc_regnum (gdbarch
, ARM_PC_REGNUM
);
10401 set_gdbarch_num_regs (gdbarch
, ARM_NUM_REGS
);
10402 set_gdbarch_register_type (gdbarch
, arm_register_type
);
10403 set_gdbarch_register_reggroup_p (gdbarch
, arm_register_reggroup_p
);
10405 /* This "info float" is FPA-specific. Use the generic version if we
10406 do not have FPA. */
10407 if (gdbarch_tdep (gdbarch
)->have_fpa_registers
)
10408 set_gdbarch_print_float_info (gdbarch
, arm_print_float_info
);
10410 /* Internal <-> external register number maps. */
10411 set_gdbarch_dwarf2_reg_to_regnum (gdbarch
, arm_dwarf_reg_to_regnum
);
10412 set_gdbarch_register_sim_regno (gdbarch
, arm_register_sim_regno
);
10414 set_gdbarch_register_name (gdbarch
, arm_register_name
);
10416 /* Returning results. */
10417 set_gdbarch_return_value (gdbarch
, arm_return_value
);
10420 set_gdbarch_print_insn (gdbarch
, gdb_print_insn_arm
);
10422 /* Minsymbol frobbing. */
10423 set_gdbarch_elf_make_msymbol_special (gdbarch
, arm_elf_make_msymbol_special
);
10424 set_gdbarch_coff_make_msymbol_special (gdbarch
,
10425 arm_coff_make_msymbol_special
);
10426 set_gdbarch_record_special_symbol (gdbarch
, arm_record_special_symbol
);
10428 /* Thumb-2 IT block support. */
10429 set_gdbarch_adjust_breakpoint_address (gdbarch
,
10430 arm_adjust_breakpoint_address
);
10432 /* Virtual tables. */
10433 set_gdbarch_vbit_in_delta (gdbarch
, 1);
10435 /* Hook in the ABI-specific overrides, if they have been registered. */
10436 gdbarch_init_osabi (info
, gdbarch
);
10438 dwarf2_frame_set_init_reg (gdbarch
, arm_dwarf2_frame_init_reg
);
10440 /* Add some default predicates. */
10442 frame_unwind_append_unwinder (gdbarch
, &arm_m_exception_unwind
);
10443 frame_unwind_append_unwinder (gdbarch
, &arm_stub_unwind
);
10444 dwarf2_append_unwinders (gdbarch
);
10445 frame_unwind_append_unwinder (gdbarch
, &arm_exidx_unwind
);
10446 frame_unwind_append_unwinder (gdbarch
, &arm_prologue_unwind
);
10448 /* Now we have tuned the configuration, set a few final things,
10449 based on what the OS ABI has told us. */
10451 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10452 binaries are always marked. */
10453 if (tdep
->arm_abi
== ARM_ABI_AUTO
)
10454 tdep
->arm_abi
= ARM_ABI_APCS
;
10456 /* Watchpoints are not steppable. */
10457 set_gdbarch_have_nonsteppable_watchpoint (gdbarch
, 1);
10459 /* We used to default to FPA for generic ARM, but almost nobody
10460 uses that now, and we now provide a way for the user to force
10461 the model. So default to the most useful variant. */
10462 if (tdep
->fp_model
== ARM_FLOAT_AUTO
)
10463 tdep
->fp_model
= ARM_FLOAT_SOFT_FPA
;
10465 if (tdep
->jb_pc
>= 0)
10466 set_gdbarch_get_longjmp_target (gdbarch
, arm_get_longjmp_target
);
10468 /* Floating point sizes and format. */
10469 set_gdbarch_float_format (gdbarch
, floatformats_ieee_single
);
10470 if (tdep
->fp_model
== ARM_FLOAT_SOFT_FPA
|| tdep
->fp_model
== ARM_FLOAT_FPA
)
10472 set_gdbarch_double_format
10473 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
10474 set_gdbarch_long_double_format
10475 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
10479 set_gdbarch_double_format (gdbarch
, floatformats_ieee_double
);
10480 set_gdbarch_long_double_format (gdbarch
, floatformats_ieee_double
);
10483 if (have_vfp_pseudos
)
10485 /* NOTE: These are the only pseudo registers used by
10486 the ARM target at the moment. If more are added, a
10487 little more care in numbering will be needed. */
10489 int num_pseudos
= 32;
10490 if (have_neon_pseudos
)
10492 set_gdbarch_num_pseudo_regs (gdbarch
, num_pseudos
);
10493 set_gdbarch_pseudo_register_read (gdbarch
, arm_pseudo_read
);
10494 set_gdbarch_pseudo_register_write (gdbarch
, arm_pseudo_write
);
10499 set_tdesc_pseudo_register_name (gdbarch
, arm_register_name
);
10501 tdesc_use_registers (gdbarch
, tdesc
, tdesc_data
);
10503 /* Override tdesc_register_type to adjust the types of VFP
10504 registers for NEON. */
10505 set_gdbarch_register_type (gdbarch
, arm_register_type
);
10508 /* Add standard register aliases. We add aliases even for those
10509 nanes which are used by the current architecture - it's simpler,
10510 and does no harm, since nothing ever lists user registers. */
10511 for (i
= 0; i
< ARRAY_SIZE (arm_register_aliases
); i
++)
10512 user_reg_add (gdbarch
, arm_register_aliases
[i
].name
,
10513 value_of_arm_user_reg
, &arm_register_aliases
[i
].regnum
);
10519 arm_dump_tdep (struct gdbarch
*gdbarch
, struct ui_file
*file
)
10521 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
10526 fprintf_unfiltered (file
, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10527 (unsigned long) tdep
->lowest_pc
);
10530 extern initialize_file_ftype _initialize_arm_tdep
; /* -Wmissing-prototypes */
10533 _initialize_arm_tdep (void)
10535 struct ui_file
*stb
;
10537 struct cmd_list_element
*new_set
, *new_show
;
10538 const char *setname
;
10539 const char *setdesc
;
10540 const char *const *regnames
;
10542 static char *helptext
;
10543 char regdesc
[1024], *rdptr
= regdesc
;
10544 size_t rest
= sizeof (regdesc
);
10546 gdbarch_register (bfd_arch_arm
, arm_gdbarch_init
, arm_dump_tdep
);
10548 arm_objfile_data_key
10549 = register_objfile_data_with_cleanup (NULL
, arm_objfile_data_free
);
10551 /* Add ourselves to objfile event chain. */
10552 observer_attach_new_objfile (arm_exidx_new_objfile
);
10554 = register_objfile_data_with_cleanup (NULL
, arm_exidx_data_free
);
10556 /* Register an ELF OS ABI sniffer for ARM binaries. */
10557 gdbarch_register_osabi_sniffer (bfd_arch_arm
,
10558 bfd_target_elf_flavour
,
10559 arm_elf_osabi_sniffer
);
10561 /* Initialize the standard target descriptions. */
10562 initialize_tdesc_arm_with_m ();
10563 initialize_tdesc_arm_with_m_fpa_layout ();
10564 initialize_tdesc_arm_with_m_vfp_d16 ();
10565 initialize_tdesc_arm_with_iwmmxt ();
10566 initialize_tdesc_arm_with_vfpv2 ();
10567 initialize_tdesc_arm_with_vfpv3 ();
10568 initialize_tdesc_arm_with_neon ();
10570 /* Get the number of possible sets of register names defined in opcodes. */
10571 num_disassembly_options
= get_arm_regname_num_options ();
10573 /* Add root prefix command for all "set arm"/"show arm" commands. */
10574 add_prefix_cmd ("arm", no_class
, set_arm_command
,
10575 _("Various ARM-specific commands."),
10576 &setarmcmdlist
, "set arm ", 0, &setlist
);
10578 add_prefix_cmd ("arm", no_class
, show_arm_command
,
10579 _("Various ARM-specific commands."),
10580 &showarmcmdlist
, "show arm ", 0, &showlist
);
10582 /* Sync the opcode insn printer with our register viewer. */
10583 parse_arm_disassembler_option ("reg-names-std");
10585 /* Initialize the array that will be passed to
10586 add_setshow_enum_cmd(). */
10587 valid_disassembly_styles
10588 = xmalloc ((num_disassembly_options
+ 1) * sizeof (char *));
10589 for (i
= 0; i
< num_disassembly_options
; i
++)
10591 numregs
= get_arm_regnames (i
, &setname
, &setdesc
, ®names
);
10592 valid_disassembly_styles
[i
] = setname
;
10593 length
= snprintf (rdptr
, rest
, "%s - %s\n", setname
, setdesc
);
10596 /* When we find the default names, tell the disassembler to use
10598 if (!strcmp (setname
, "std"))
10600 disassembly_style
= setname
;
10601 set_arm_regname_option (i
);
10604 /* Mark the end of valid options. */
10605 valid_disassembly_styles
[num_disassembly_options
] = NULL
;
10607 /* Create the help text. */
10608 stb
= mem_fileopen ();
10609 fprintf_unfiltered (stb
, "%s%s%s",
10610 _("The valid values are:\n"),
10612 _("The default is \"std\"."));
10613 helptext
= ui_file_xstrdup (stb
, NULL
);
10614 ui_file_delete (stb
);
10616 add_setshow_enum_cmd("disassembler", no_class
,
10617 valid_disassembly_styles
, &disassembly_style
,
10618 _("Set the disassembly style."),
10619 _("Show the disassembly style."),
10621 set_disassembly_style_sfunc
,
10622 NULL
, /* FIXME: i18n: The disassembly style is
10624 &setarmcmdlist
, &showarmcmdlist
);
10626 add_setshow_boolean_cmd ("apcs32", no_class
, &arm_apcs_32
,
10627 _("Set usage of ARM 32-bit mode."),
10628 _("Show usage of ARM 32-bit mode."),
10629 _("When off, a 26-bit PC will be used."),
10631 NULL
, /* FIXME: i18n: Usage of ARM 32-bit
10633 &setarmcmdlist
, &showarmcmdlist
);
10635 /* Add a command to allow the user to force the FPU model. */
10636 add_setshow_enum_cmd ("fpu", no_class
, fp_model_strings
, ¤t_fp_model
,
10637 _("Set the floating point type."),
10638 _("Show the floating point type."),
10639 _("auto - Determine the FP typefrom the OS-ABI.\n\
10640 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10641 fpa - FPA co-processor (GCC compiled).\n\
10642 softvfp - Software FP with pure-endian doubles.\n\
10643 vfp - VFP co-processor."),
10644 set_fp_model_sfunc
, show_fp_model
,
10645 &setarmcmdlist
, &showarmcmdlist
);
10647 /* Add a command to allow the user to force the ABI. */
10648 add_setshow_enum_cmd ("abi", class_support
, arm_abi_strings
, &arm_abi_string
,
10650 _("Show the ABI."),
10651 NULL
, arm_set_abi
, arm_show_abi
,
10652 &setarmcmdlist
, &showarmcmdlist
);
10654 /* Add two commands to allow the user to force the assumed
10656 add_setshow_enum_cmd ("fallback-mode", class_support
,
10657 arm_mode_strings
, &arm_fallback_mode_string
,
10658 _("Set the mode assumed when symbols are unavailable."),
10659 _("Show the mode assumed when symbols are unavailable."),
10660 NULL
, NULL
, arm_show_fallback_mode
,
10661 &setarmcmdlist
, &showarmcmdlist
);
10662 add_setshow_enum_cmd ("force-mode", class_support
,
10663 arm_mode_strings
, &arm_force_mode_string
,
10664 _("Set the mode assumed even when symbols are available."),
10665 _("Show the mode assumed even when symbols are available."),
10666 NULL
, NULL
, arm_show_force_mode
,
10667 &setarmcmdlist
, &showarmcmdlist
);
10669 /* Debugging flag. */
10670 add_setshow_boolean_cmd ("arm", class_maintenance
, &arm_debug
,
10671 _("Set ARM debugging."),
10672 _("Show ARM debugging."),
10673 _("When on, arm-specific debugging is enabled."),
10675 NULL
, /* FIXME: i18n: "ARM debugging is %s. */
10676 &setdebuglist
, &showdebuglist
);
10679 /* ARM-reversible process record data structures. */
10681 #define ARM_INSN_SIZE_BYTES 4
10682 #define THUMB_INSN_SIZE_BYTES 2
10683 #define THUMB2_INSN_SIZE_BYTES 4
10686 /* Position of the bit within a 32-bit ARM instruction
10687 that defines whether the instruction is a load or store. */
10688 #define INSN_S_L_BIT_NUM 20
10690 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10693 unsigned int reg_len = LENGTH; \
10696 REGS = XNEWVEC (uint32_t, reg_len); \
10697 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
10702 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10705 unsigned int mem_len = LENGTH; \
10708 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10709 memcpy(&MEMS->len, &RECORD_BUF[0], \
10710 sizeof(struct arm_mem_r) * LENGTH); \
10715 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
10716 #define INSN_RECORDED(ARM_RECORD) \
10717 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10719 /* ARM memory record structure. */
10722 uint32_t len
; /* Record length. */
10723 uint32_t addr
; /* Memory address. */
10726 /* ARM instruction record contains opcode of current insn
10727 and execution state (before entry to decode_insn()),
10728 contains list of to-be-modified registers and
10729 memory blocks (on return from decode_insn()). */
10731 typedef struct insn_decode_record_t
10733 struct gdbarch
*gdbarch
;
10734 struct regcache
*regcache
;
10735 CORE_ADDR this_addr
; /* Address of the insn being decoded. */
10736 uint32_t arm_insn
; /* Should accommodate thumb. */
10737 uint32_t cond
; /* Condition code. */
10738 uint32_t opcode
; /* Insn opcode. */
10739 uint32_t decode
; /* Insn decode bits. */
10740 uint32_t mem_rec_count
; /* No of mem records. */
10741 uint32_t reg_rec_count
; /* No of reg records. */
10742 uint32_t *arm_regs
; /* Registers to be saved for this record. */
10743 struct arm_mem_r
*arm_mems
; /* Memory to be saved for this record. */
10744 } insn_decode_record
;
/* Checks ARM SBZ and SBO mandatory fields.

   Extract the LEN-bit field of INSN that starts at (1-based) bit
   BIT_NUM.  When SBO is non-zero the field is a Should-Be-One field
   and every bit must be set; otherwise it is a Should-Be-Zero field
   and every bit must be clear.  Returns non-zero when the field is
   valid.  Assumes LEN < 32 (all callers pass small widths).  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));

  if (sbo)
    /* All LEN bits must be ones.  */
    return ones == ((1u << len) - 1u);
  else
    /* All LEN bits must be zeros.  */
    return ones == 0;
}
/* Overall outcome of recording one instruction.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Which flavour of misc store arm_record_strx is decoding.  */
typedef enum
{
  ARM_RECORD_STRH = 1,
  ARM_RECORD_STRD
} arm_record_strx_t;
10792 arm_record_strx (insn_decode_record
*arm_insn_r
, uint32_t *record_buf
,
10793 uint32_t *record_buf_mem
, arm_record_strx_t str_type
)
10796 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10797 ULONGEST u_regval
[2]= {0};
10799 uint32_t reg_src1
= 0, reg_src2
= 0;
10800 uint32_t immed_high
= 0, immed_low
= 0,offset_8
= 0, tgt_mem_addr
= 0;
10801 uint32_t opcode1
= 0;
10803 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
10804 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
10805 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 24);
10808 if (14 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
10810 /* 1) Handle misc store, immediate offset. */
10811 immed_low
= bits (arm_insn_r
->arm_insn
, 0, 3);
10812 immed_high
= bits (arm_insn_r
->arm_insn
, 8, 11);
10813 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10814 regcache_raw_read_unsigned (reg_cache
, reg_src1
,
10816 if (ARM_PC_REGNUM
== reg_src1
)
10818 /* If R15 was used as Rn, hence current PC+8. */
10819 u_regval
[0] = u_regval
[0] + 8;
10821 offset_8
= (immed_high
<< 4) | immed_low
;
10822 /* Calculate target store address. */
10823 if (14 == arm_insn_r
->opcode
)
10825 tgt_mem_addr
= u_regval
[0] + offset_8
;
10829 tgt_mem_addr
= u_regval
[0] - offset_8
;
10831 if (ARM_RECORD_STRH
== str_type
)
10833 record_buf_mem
[0] = 2;
10834 record_buf_mem
[1] = tgt_mem_addr
;
10835 arm_insn_r
->mem_rec_count
= 1;
10837 else if (ARM_RECORD_STRD
== str_type
)
10839 record_buf_mem
[0] = 4;
10840 record_buf_mem
[1] = tgt_mem_addr
;
10841 record_buf_mem
[2] = 4;
10842 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10843 arm_insn_r
->mem_rec_count
= 2;
10846 else if (12 == arm_insn_r
->opcode
|| 8 == arm_insn_r
->opcode
)
10848 /* 2) Store, register offset. */
10850 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10852 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10853 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10854 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10855 if (15 == reg_src2
)
10857 /* If R15 was used as Rn, hence current PC+8. */
10858 u_regval
[0] = u_regval
[0] + 8;
10860 /* Calculate target store address, Rn +/- Rm, register offset. */
10861 if (12 == arm_insn_r
->opcode
)
10863 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
10867 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
10869 if (ARM_RECORD_STRH
== str_type
)
10871 record_buf_mem
[0] = 2;
10872 record_buf_mem
[1] = tgt_mem_addr
;
10873 arm_insn_r
->mem_rec_count
= 1;
10875 else if (ARM_RECORD_STRD
== str_type
)
10877 record_buf_mem
[0] = 4;
10878 record_buf_mem
[1] = tgt_mem_addr
;
10879 record_buf_mem
[2] = 4;
10880 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10881 arm_insn_r
->mem_rec_count
= 2;
10884 else if (11 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
10885 || 2 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
)
10887 /* 3) Store, immediate pre-indexed. */
10888 /* 5) Store, immediate post-indexed. */
10889 immed_low
= bits (arm_insn_r
->arm_insn
, 0, 3);
10890 immed_high
= bits (arm_insn_r
->arm_insn
, 8, 11);
10891 offset_8
= (immed_high
<< 4) | immed_low
;
10892 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10893 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10894 /* Calculate target store address, Rn +/- Rm, register offset. */
10895 if (15 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
)
10897 tgt_mem_addr
= u_regval
[0] + offset_8
;
10901 tgt_mem_addr
= u_regval
[0] - offset_8
;
10903 if (ARM_RECORD_STRH
== str_type
)
10905 record_buf_mem
[0] = 2;
10906 record_buf_mem
[1] = tgt_mem_addr
;
10907 arm_insn_r
->mem_rec_count
= 1;
10909 else if (ARM_RECORD_STRD
== str_type
)
10911 record_buf_mem
[0] = 4;
10912 record_buf_mem
[1] = tgt_mem_addr
;
10913 record_buf_mem
[2] = 4;
10914 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10915 arm_insn_r
->mem_rec_count
= 2;
10917 /* Record Rn also as it changes. */
10918 *(record_buf
) = bits (arm_insn_r
->arm_insn
, 16, 19);
10919 arm_insn_r
->reg_rec_count
= 1;
10921 else if (9 == arm_insn_r
->opcode
|| 13 == arm_insn_r
->opcode
10922 || 0 == arm_insn_r
->opcode
|| 4 == arm_insn_r
->opcode
)
10924 /* 4) Store, register pre-indexed. */
10925 /* 6) Store, register post -indexed. */
10926 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10927 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10928 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10929 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10930 /* Calculate target store address, Rn +/- Rm, register offset. */
10931 if (13 == arm_insn_r
->opcode
|| 4 == arm_insn_r
->opcode
)
10933 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
10937 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
10939 if (ARM_RECORD_STRH
== str_type
)
10941 record_buf_mem
[0] = 2;
10942 record_buf_mem
[1] = tgt_mem_addr
;
10943 arm_insn_r
->mem_rec_count
= 1;
10945 else if (ARM_RECORD_STRD
== str_type
)
10947 record_buf_mem
[0] = 4;
10948 record_buf_mem
[1] = tgt_mem_addr
;
10949 record_buf_mem
[2] = 4;
10950 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10951 arm_insn_r
->mem_rec_count
= 2;
10953 /* Record Rn also as it changes. */
10954 *(record_buf
) = bits (arm_insn_r
->arm_insn
, 16, 19);
10955 arm_insn_r
->reg_rec_count
= 1;
10960 /* Handling ARM extension space insns. */
10963 arm_record_extension_space (insn_decode_record
*arm_insn_r
)
10965 uint32_t ret
= 0; /* Return value: -1:record failure ; 0:success */
10966 uint32_t opcode1
= 0, opcode2
= 0, insn_op1
= 0;
10967 uint32_t record_buf
[8], record_buf_mem
[8];
10968 uint32_t reg_src1
= 0;
10969 uint32_t immed_high
= 0, immed_low
= 0,offset_8
= 0, tgt_mem_addr
= 0;
10970 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10971 ULONGEST u_regval
= 0;
10973 gdb_assert (!INSN_RECORDED(arm_insn_r
));
10974 /* Handle unconditional insn extension space. */
10976 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 27);
10977 opcode2
= bits (arm_insn_r
->arm_insn
, 4, 7);
10978 if (arm_insn_r
->cond
)
10980 /* PLD has no affect on architectural state, it just affects
10982 if (5 == ((opcode1
& 0xE0) >> 5))
10985 record_buf
[0] = ARM_PS_REGNUM
;
10986 record_buf
[1] = ARM_LR_REGNUM
;
10987 arm_insn_r
->reg_rec_count
= 2;
10989 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10993 opcode1
= bits (arm_insn_r
->arm_insn
, 25, 27);
10994 if (3 == opcode1
&& bit (arm_insn_r
->arm_insn
, 4))
10997 /* Undefined instruction on ARM V5; need to handle if later
10998 versions define it. */
11001 opcode1
= bits (arm_insn_r
->arm_insn
, 24, 27);
11002 opcode2
= bits (arm_insn_r
->arm_insn
, 4, 7);
11003 insn_op1
= bits (arm_insn_r
->arm_insn
, 20, 23);
11005 /* Handle arithmetic insn extension space. */
11006 if (!opcode1
&& 9 == opcode2
&& 1 != arm_insn_r
->cond
11007 && !INSN_RECORDED(arm_insn_r
))
11009 /* Handle MLA(S) and MUL(S). */
11010 if (0 <= insn_op1
&& 3 >= insn_op1
)
11012 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11013 record_buf
[1] = ARM_PS_REGNUM
;
11014 arm_insn_r
->reg_rec_count
= 2;
11016 else if (4 <= insn_op1
&& 15 >= insn_op1
)
11018 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11019 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
11020 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
11021 record_buf
[2] = ARM_PS_REGNUM
;
11022 arm_insn_r
->reg_rec_count
= 3;
11026 opcode1
= bits (arm_insn_r
->arm_insn
, 26, 27);
11027 opcode2
= bits (arm_insn_r
->arm_insn
, 23, 24);
11028 insn_op1
= bits (arm_insn_r
->arm_insn
, 21, 22);
11030 /* Handle control insn extension space. */
11032 if (!opcode1
&& 2 == opcode2
&& !bit (arm_insn_r
->arm_insn
, 20)
11033 && 1 != arm_insn_r
->cond
&& !INSN_RECORDED(arm_insn_r
))
11035 if (!bit (arm_insn_r
->arm_insn
,25))
11037 if (!bits (arm_insn_r
->arm_insn
, 4, 7))
11039 if ((0 == insn_op1
) || (2 == insn_op1
))
11042 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11043 arm_insn_r
->reg_rec_count
= 1;
11045 else if (1 == insn_op1
)
11047 /* CSPR is going to be changed. */
11048 record_buf
[0] = ARM_PS_REGNUM
;
11049 arm_insn_r
->reg_rec_count
= 1;
11051 else if (3 == insn_op1
)
11053 /* SPSR is going to be changed. */
11054 /* We need to get SPSR value, which is yet to be done. */
11055 printf_unfiltered (_("Process record does not support "
11056 "instruction 0x%0x at address %s.\n"),
11057 arm_insn_r
->arm_insn
,
11058 paddress (arm_insn_r
->gdbarch
,
11059 arm_insn_r
->this_addr
));
11063 else if (1 == bits (arm_insn_r
->arm_insn
, 4, 7))
11068 record_buf
[0] = ARM_PS_REGNUM
;
11069 arm_insn_r
->reg_rec_count
= 1;
11071 else if (3 == insn_op1
)
11074 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11075 arm_insn_r
->reg_rec_count
= 1;
11078 else if (3 == bits (arm_insn_r
->arm_insn
, 4, 7))
11081 record_buf
[0] = ARM_PS_REGNUM
;
11082 record_buf
[1] = ARM_LR_REGNUM
;
11083 arm_insn_r
->reg_rec_count
= 2;
11085 else if (5 == bits (arm_insn_r
->arm_insn
, 4, 7))
11087 /* QADD, QSUB, QDADD, QDSUB */
11088 record_buf
[0] = ARM_PS_REGNUM
;
11089 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
11090 arm_insn_r
->reg_rec_count
= 2;
11092 else if (7 == bits (arm_insn_r
->arm_insn
, 4, 7))
11095 record_buf
[0] = ARM_PS_REGNUM
;
11096 record_buf
[1] = ARM_LR_REGNUM
;
11097 arm_insn_r
->reg_rec_count
= 2;
11099 /* Save SPSR also;how? */
11100 printf_unfiltered (_("Process record does not support "
11101 "instruction 0x%0x at address %s.\n"),
11102 arm_insn_r
->arm_insn
,
11103 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11106 else if(8 == bits (arm_insn_r
->arm_insn
, 4, 7)
11107 || 10 == bits (arm_insn_r
->arm_insn
, 4, 7)
11108 || 12 == bits (arm_insn_r
->arm_insn
, 4, 7)
11109 || 14 == bits (arm_insn_r
->arm_insn
, 4, 7)
11112 if (0 == insn_op1
|| 1 == insn_op1
)
11114 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11115 /* We dont do optimization for SMULW<y> where we
11117 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11118 record_buf
[1] = ARM_PS_REGNUM
;
11119 arm_insn_r
->reg_rec_count
= 2;
11121 else if (2 == insn_op1
)
11124 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11125 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
11126 arm_insn_r
->reg_rec_count
= 2;
11128 else if (3 == insn_op1
)
11131 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11132 arm_insn_r
->reg_rec_count
= 1;
11138 /* MSR : immediate form. */
11141 /* CSPR is going to be changed. */
11142 record_buf
[0] = ARM_PS_REGNUM
;
11143 arm_insn_r
->reg_rec_count
= 1;
11145 else if (3 == insn_op1
)
11147 /* SPSR is going to be changed. */
11148 /* we need to get SPSR value, which is yet to be done */
11149 printf_unfiltered (_("Process record does not support "
11150 "instruction 0x%0x at address %s.\n"),
11151 arm_insn_r
->arm_insn
,
11152 paddress (arm_insn_r
->gdbarch
,
11153 arm_insn_r
->this_addr
));
11159 opcode1
= bits (arm_insn_r
->arm_insn
, 25, 27);
11160 opcode2
= bits (arm_insn_r
->arm_insn
, 20, 24);
11161 insn_op1
= bits (arm_insn_r
->arm_insn
, 5, 6);
11163 /* Handle load/store insn extension space. */
11165 if (!opcode1
&& bit (arm_insn_r
->arm_insn
, 7)
11166 && bit (arm_insn_r
->arm_insn
, 4) && 1 != arm_insn_r
->cond
11167 && !INSN_RECORDED(arm_insn_r
))
11172 /* These insn, changes register and memory as well. */
11173 /* SWP or SWPB insn. */
11174 /* Get memory address given by Rn. */
11175 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
11176 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
11177 /* SWP insn ?, swaps word. */
11178 if (8 == arm_insn_r
->opcode
)
11180 record_buf_mem
[0] = 4;
11184 /* SWPB insn, swaps only byte. */
11185 record_buf_mem
[0] = 1;
11187 record_buf_mem
[1] = u_regval
;
11188 arm_insn_r
->mem_rec_count
= 1;
11189 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11190 arm_insn_r
->reg_rec_count
= 1;
11192 else if (1 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
11195 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
11198 else if (2 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
11201 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11202 record_buf
[1] = record_buf
[0] + 1;
11203 arm_insn_r
->reg_rec_count
= 2;
11205 else if (3 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
11208 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
11211 else if (bit (arm_insn_r
->arm_insn
, 20) && insn_op1
<= 3)
11213 /* LDRH, LDRSB, LDRSH. */
11214 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11215 arm_insn_r
->reg_rec_count
= 1;
11220 opcode1
= bits (arm_insn_r
->arm_insn
, 23, 27);
11221 if (24 == opcode1
&& bit (arm_insn_r
->arm_insn
, 21)
11222 && !INSN_RECORDED(arm_insn_r
))
11225 /* Handle coprocessor insn extension space. */
11228 /* To be done for ARMv5 and later; as of now we return -1. */
11230 printf_unfiltered (_("Process record does not support instruction x%0x "
11231 "at address %s.\n"),arm_insn_r
->arm_insn
,
11232 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11235 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11236 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11241 /* Handling opcode 000 insns. */
11244 arm_record_data_proc_misc_ld_str (insn_decode_record
*arm_insn_r
)
11246 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11247 uint32_t record_buf
[8], record_buf_mem
[8];
11248 ULONGEST u_regval
[2] = {0};
11250 uint32_t reg_src1
= 0, reg_src2
= 0, reg_dest
= 0;
11251 uint32_t immed_high
= 0, immed_low
= 0, offset_8
= 0, tgt_mem_addr
= 0;
11252 uint32_t opcode1
= 0;
11254 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
11255 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
11256 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 24);
11258 /* Data processing insn /multiply insn. */
11259 if (9 == arm_insn_r
->decode
11260 && ((4 <= arm_insn_r
->opcode
&& 7 >= arm_insn_r
->opcode
)
11261 || (0 == arm_insn_r
->opcode
|| 1 == arm_insn_r
->opcode
)))
11263 /* Handle multiply instructions. */
11264 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11265 if (0 == arm_insn_r
->opcode
|| 1 == arm_insn_r
->opcode
)
11267 /* Handle MLA and MUL. */
11268 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
11269 record_buf
[1] = ARM_PS_REGNUM
;
11270 arm_insn_r
->reg_rec_count
= 2;
11272 else if (4 <= arm_insn_r
->opcode
&& 7 >= arm_insn_r
->opcode
)
11274 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11275 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
11276 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
11277 record_buf
[2] = ARM_PS_REGNUM
;
11278 arm_insn_r
->reg_rec_count
= 3;
11281 else if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
)
11282 && (11 == arm_insn_r
->decode
|| 13 == arm_insn_r
->decode
))
11284 /* Handle misc load insns, as 20th bit (L = 1). */
11285 /* LDR insn has a capability to do branching, if
11286 MOV LR, PC is precceded by LDR insn having Rn as R15
11287 in that case, it emulates branch and link insn, and hence we
11288 need to save CSPR and PC as well. I am not sure this is right
11289 place; as opcode = 010 LDR insn make this happen, if R15 was
11291 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
11292 if (15 != reg_dest
)
11294 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11295 arm_insn_r
->reg_rec_count
= 1;
11299 record_buf
[0] = reg_dest
;
11300 record_buf
[1] = ARM_PS_REGNUM
;
11301 arm_insn_r
->reg_rec_count
= 2;
11304 else if ((9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
)
11305 && sbo_sbz (arm_insn_r
->arm_insn
, 5, 12, 0)
11306 && sbo_sbz (arm_insn_r
->arm_insn
, 13, 4, 1)
11307 && 2 == bits (arm_insn_r
->arm_insn
, 20, 21))
11309 /* Handle MSR insn. */
11310 if (9 == arm_insn_r
->opcode
)
11312 /* CSPR is going to be changed. */
11313 record_buf
[0] = ARM_PS_REGNUM
;
11314 arm_insn_r
->reg_rec_count
= 1;
11318 /* SPSR is going to be changed. */
11319 /* How to read SPSR value? */
11320 printf_unfiltered (_("Process record does not support instruction "
11321 "0x%0x at address %s.\n"),
11322 arm_insn_r
->arm_insn
,
11323 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11327 else if (9 == arm_insn_r
->decode
11328 && (8 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
11329 && !bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11331 /* Handling SWP, SWPB. */
11332 /* These insn, changes register and memory as well. */
11333 /* SWP or SWPB insn. */
11335 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
11336 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
11337 /* SWP insn ?, swaps word. */
11338 if (8 == arm_insn_r
->opcode
)
11340 record_buf_mem
[0] = 4;
11344 /* SWPB insn, swaps only byte. */
11345 record_buf_mem
[0] = 1;
11347 record_buf_mem
[1] = u_regval
[0];
11348 arm_insn_r
->mem_rec_count
= 1;
11349 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11350 arm_insn_r
->reg_rec_count
= 1;
11352 else if (3 == arm_insn_r
->decode
&& 0x12 == opcode1
11353 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 12, 1))
11355 /* Handle BLX, branch and link/exchange. */
11356 if (9 == arm_insn_r
->opcode
)
11358 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11359 and R14 stores the return address. */
11360 record_buf
[0] = ARM_PS_REGNUM
;
11361 record_buf
[1] = ARM_LR_REGNUM
;
11362 arm_insn_r
->reg_rec_count
= 2;
11365 else if (7 == arm_insn_r
->decode
&& 0x12 == opcode1
)
11367 /* Handle enhanced software breakpoint insn, BKPT. */
11368 /* CPSR is changed to be executed in ARM state, disabling normal
11369 interrupts, entering abort mode. */
11370 /* According to high vector configuration PC is set. */
11371 /* user hit breakpoint and type reverse, in
11372 that case, we need to go back with previous CPSR and
11373 Program Counter. */
11374 record_buf
[0] = ARM_PS_REGNUM
;
11375 record_buf
[1] = ARM_LR_REGNUM
;
11376 arm_insn_r
->reg_rec_count
= 2;
11378 /* Save SPSR also; how? */
11379 printf_unfiltered (_("Process record does not support instruction "
11380 "0x%0x at address %s.\n"),arm_insn_r
->arm_insn
,
11381 paddress (arm_insn_r
->gdbarch
,
11382 arm_insn_r
->this_addr
));
11385 else if (11 == arm_insn_r
->decode
11386 && !bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11388 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11390 /* Handle str(x) insn */
11391 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
11394 else if (1 == arm_insn_r
->decode
&& 0x12 == opcode1
11395 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 12, 1))
11397 /* Handle BX, branch and link/exchange. */
11398 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11399 record_buf
[0] = ARM_PS_REGNUM
;
11400 arm_insn_r
->reg_rec_count
= 1;
11402 else if (1 == arm_insn_r
->decode
&& 0x16 == opcode1
11403 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 4, 1)
11404 && sbo_sbz (arm_insn_r
->arm_insn
, 17, 4, 1))
11406 /* Count leading zeros: CLZ. */
11407 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11408 arm_insn_r
->reg_rec_count
= 1;
11410 else if (!bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
)
11411 && (8 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
11412 && sbo_sbz (arm_insn_r
->arm_insn
, 17, 4, 1)
11413 && sbo_sbz (arm_insn_r
->arm_insn
, 1, 12, 0)
11416 /* Handle MRS insn. */
11417 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11418 arm_insn_r
->reg_rec_count
= 1;
11420 else if (arm_insn_r
->opcode
<= 15)
11422 /* Normal data processing insns. */
11423 /* Out of 11 shifter operands mode, all the insn modifies destination
11424 register, which is specified by 13-16 decode. */
11425 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11426 record_buf
[1] = ARM_PS_REGNUM
;
11427 arm_insn_r
->reg_rec_count
= 2;
11434 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11435 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11439 /* Handling opcode 001 insns. */
11442 arm_record_data_proc_imm (insn_decode_record
*arm_insn_r
)
11444 uint32_t record_buf
[8], record_buf_mem
[8];
11446 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
11447 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
11449 if ((9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
)
11450 && 2 == bits (arm_insn_r
->arm_insn
, 20, 21)
11451 && sbo_sbz (arm_insn_r
->arm_insn
, 13, 4, 1)
11454 /* Handle MSR insn. */
11455 if (9 == arm_insn_r
->opcode
)
11457 /* CSPR is going to be changed. */
11458 record_buf
[0] = ARM_PS_REGNUM
;
11459 arm_insn_r
->reg_rec_count
= 1;
11463 /* SPSR is going to be changed. */
11466 else if (arm_insn_r
->opcode
<= 15)
11468 /* Normal data processing insns. */
11469 /* Out of 11 shifter operands mode, all the insn modifies destination
11470 register, which is specified by 13-16 decode. */
11471 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11472 record_buf
[1] = ARM_PS_REGNUM
;
11473 arm_insn_r
->reg_rec_count
= 2;
11480 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11481 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11485 /* Handle ARM mode instructions with opcode 010. */
11488 arm_record_ld_st_imm_offset (insn_decode_record
*arm_insn_r
)
11490 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11492 uint32_t reg_base
, reg_dest
;
11493 uint32_t offset_12
, tgt_mem_addr
;
11494 uint32_t record_buf
[8], record_buf_mem
[8];
11495 unsigned char wback
;
11498 /* Calculate wback. */
11499 wback
= (bit (arm_insn_r
->arm_insn
, 24) == 0)
11500 || (bit (arm_insn_r
->arm_insn
, 21) == 1);
11502 arm_insn_r
->reg_rec_count
= 0;
11503 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
11505 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11507 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11510 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
11511 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_dest
;
11513 /* The LDR instruction is capable of doing branching. If MOV LR, PC
11514 preceeds a LDR instruction having R15 as reg_base, it
11515 emulates a branch and link instruction, and hence we need to save
11516 CPSR and PC as well. */
11517 if (ARM_PC_REGNUM
== reg_dest
)
11518 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
11520 /* If wback is true, also save the base register, which is going to be
11523 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11527 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11529 offset_12
= bits (arm_insn_r
->arm_insn
, 0, 11);
11530 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
11532 /* Handle bit U. */
11533 if (bit (arm_insn_r
->arm_insn
, 23))
11535 /* U == 1: Add the offset. */
11536 tgt_mem_addr
= (uint32_t) u_regval
+ offset_12
;
11540 /* U == 0: subtract the offset. */
11541 tgt_mem_addr
= (uint32_t) u_regval
- offset_12
;
11544 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11546 if (bit (arm_insn_r
->arm_insn
, 22))
11548 /* STRB and STRBT: 1 byte. */
11549 record_buf_mem
[0] = 1;
11553 /* STR and STRT: 4 bytes. */
11554 record_buf_mem
[0] = 4;
11557 /* Handle bit P. */
11558 if (bit (arm_insn_r
->arm_insn
, 24))
11559 record_buf_mem
[1] = tgt_mem_addr
;
11561 record_buf_mem
[1] = (uint32_t) u_regval
;
11563 arm_insn_r
->mem_rec_count
= 1;
11565 /* If wback is true, also save the base register, which is going to be
11568 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11571 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11572 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11576 /* Handling opcode 011 insns. */
11579 arm_record_ld_st_reg_offset (insn_decode_record
*arm_insn_r
)
11581 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11583 uint32_t shift_imm
= 0;
11584 uint32_t reg_src1
= 0, reg_src2
= 0, reg_dest
= 0;
11585 uint32_t offset_12
= 0, tgt_mem_addr
= 0;
11586 uint32_t record_buf
[8], record_buf_mem
[8];
11589 ULONGEST u_regval
[2];
11591 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
11592 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
11594 /* Handle enhanced store insns and LDRD DSP insn,
11595 order begins according to addressing modes for store insns
11599 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11601 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
11602 /* LDR insn has a capability to do branching, if
11603 MOV LR, PC is precedded by LDR insn having Rn as R15
11604 in that case, it emulates branch and link insn, and hence we
11605 need to save CSPR and PC as well. */
11606 if (15 != reg_dest
)
11608 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11609 arm_insn_r
->reg_rec_count
= 1;
11613 record_buf
[0] = reg_dest
;
11614 record_buf
[1] = ARM_PS_REGNUM
;
11615 arm_insn_r
->reg_rec_count
= 2;
11620 if (! bits (arm_insn_r
->arm_insn
, 4, 11))
11622 /* Store insn, register offset and register pre-indexed,
11623 register post-indexed. */
11625 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
11627 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
11628 regcache_raw_read_unsigned (reg_cache
, reg_src1
11630 regcache_raw_read_unsigned (reg_cache
, reg_src2
11632 if (15 == reg_src2
)
11634 /* If R15 was used as Rn, hence current PC+8. */
11635 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11636 u_regval
[0] = u_regval
[0] + 8;
11638 /* Calculate target store address, Rn +/- Rm, register offset. */
11640 if (bit (arm_insn_r
->arm_insn
, 23))
11642 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
11646 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
11649 switch (arm_insn_r
->opcode
)
11663 record_buf_mem
[0] = 4;
11678 record_buf_mem
[0] = 1;
11682 gdb_assert_not_reached ("no decoding pattern found");
11685 record_buf_mem
[1] = tgt_mem_addr
;
11686 arm_insn_r
->mem_rec_count
= 1;
11688 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
11689 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
11690 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
11691 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
11692 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
11693 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
11696 /* Rn is going to be changed in pre-indexed mode and
11697 post-indexed mode as well. */
11698 record_buf
[0] = reg_src2
;
11699 arm_insn_r
->reg_rec_count
= 1;
11704 /* Store insn, scaled register offset; scaled pre-indexed. */
11705 offset_12
= bits (arm_insn_r
->arm_insn
, 5, 6);
11707 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
11709 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
11710 /* Get shift_imm. */
11711 shift_imm
= bits (arm_insn_r
->arm_insn
, 7, 11);
11712 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
11713 regcache_raw_read_signed (reg_cache
, reg_src1
, &s_word
);
11714 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
11715 /* Offset_12 used as shift. */
11719 /* Offset_12 used as index. */
11720 offset_12
= u_regval
[0] << shift_imm
;
11724 offset_12
= (!shift_imm
)?0:u_regval
[0] >> shift_imm
;
11730 if (bit (u_regval
[0], 31))
11732 offset_12
= 0xFFFFFFFF;
11741 /* This is arithmetic shift. */
11742 offset_12
= s_word
>> shift_imm
;
11749 regcache_raw_read_unsigned (reg_cache
, ARM_PS_REGNUM
,
11751 /* Get C flag value and shift it by 31. */
11752 offset_12
= (((bit (u_regval
[1], 29)) << 31) \
11753 | (u_regval
[0]) >> 1);
11757 offset_12
= (u_regval
[0] >> shift_imm
) \
11759 (sizeof(uint32_t) - shift_imm
));
11764 gdb_assert_not_reached ("no decoding pattern found");
11768 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
11770 if (bit (arm_insn_r
->arm_insn
, 23))
11772 tgt_mem_addr
= u_regval
[1] + offset_12
;
11776 tgt_mem_addr
= u_regval
[1] - offset_12
;
11779 switch (arm_insn_r
->opcode
)
11793 record_buf_mem
[0] = 4;
11808 record_buf_mem
[0] = 1;
11812 gdb_assert_not_reached ("no decoding pattern found");
11815 record_buf_mem
[1] = tgt_mem_addr
;
11816 arm_insn_r
->mem_rec_count
= 1;
11818 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
11819 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
11820 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
11821 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
11822 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
11823 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
11826 /* Rn is going to be changed in register scaled pre-indexed
11827 mode,and scaled post indexed mode. */
11828 record_buf
[0] = reg_src2
;
11829 arm_insn_r
->reg_rec_count
= 1;
11834 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11835 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11839 /* Handle ARM mode instructions with opcode 100. */
11842 arm_record_ld_st_multiple (insn_decode_record
*arm_insn_r
)
11844 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11845 uint32_t register_count
= 0, register_bits
;
11846 uint32_t reg_base
, addr_mode
;
11847 uint32_t record_buf
[24], record_buf_mem
[48];
11851 /* Fetch the list of registers. */
11852 register_bits
= bits (arm_insn_r
->arm_insn
, 0, 15);
11853 arm_insn_r
->reg_rec_count
= 0;
11855 /* Fetch the base register that contains the address we are loading data
11857 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
11859 /* Calculate wback. */
11860 wback
= (bit (arm_insn_r
->arm_insn
, 21) == 1);
11862 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11864 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11866 /* Find out which registers are going to be loaded from memory. */
11867 while (register_bits
)
11869 if (register_bits
& 0x00000001)
11870 record_buf
[arm_insn_r
->reg_rec_count
++] = register_count
;
11871 register_bits
= register_bits
>> 1;
11876 /* If wback is true, also save the base register, which is going to be
11879 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11881 /* Save the CPSR register. */
11882 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
11886 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11888 addr_mode
= bits (arm_insn_r
->arm_insn
, 23, 24);
11890 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
11892 /* Find out how many registers are going to be stored to memory. */
11893 while (register_bits
)
11895 if (register_bits
& 0x00000001)
11897 register_bits
= register_bits
>> 1;
11902 /* STMDA (STMED): Decrement after. */
11904 record_buf_mem
[1] = (uint32_t) u_regval
11905 - register_count
* INT_REGISTER_SIZE
+ 4;
11907 /* STM (STMIA, STMEA): Increment after. */
11909 record_buf_mem
[1] = (uint32_t) u_regval
;
11911 /* STMDB (STMFD): Decrement before. */
11913 record_buf_mem
[1] = (uint32_t) u_regval
11914 - register_count
* INT_REGISTER_SIZE
;
11916 /* STMIB (STMFA): Increment before. */
11918 record_buf_mem
[1] = (uint32_t) u_regval
+ INT_REGISTER_SIZE
;
11921 gdb_assert_not_reached ("no decoding pattern found");
11925 record_buf_mem
[0] = register_count
* INT_REGISTER_SIZE
;
11926 arm_insn_r
->mem_rec_count
= 1;
11928 /* If wback is true, also save the base register, which is going to be
11931 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11934 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11935 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11939 /* Handling opcode 101 insns. */
11942 arm_record_b_bl (insn_decode_record
*arm_insn_r
)
11944 uint32_t record_buf
[8];
11946 /* Handle B, BL, BLX(1) insns. */
11947 /* B simply branches so we do nothing here. */
11948 /* Note: BLX(1) doesnt fall here but instead it falls into
11949 extension space. */
11950 if (bit (arm_insn_r
->arm_insn
, 24))
11952 record_buf
[0] = ARM_LR_REGNUM
;
11953 arm_insn_r
->reg_rec_count
= 1;
11956 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11961 /* Handling opcode 110 insns. */
11964 arm_record_unsupported_insn (insn_decode_record
*arm_insn_r
)
11966 printf_unfiltered (_("Process record does not support instruction "
11967 "0x%0x at address %s.\n"),arm_insn_r
->arm_insn
,
11968 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11973 /* Record handler for vector data transfer instructions. */
11976 arm_record_vdata_transfer_insn (insn_decode_record
*arm_insn_r
)
11978 uint32_t bits_a
, bit_c
, bit_l
, reg_t
, reg_v
;
11979 uint32_t record_buf
[4];
11981 const int num_regs
= gdbarch_num_regs (arm_insn_r
->gdbarch
);
11982 reg_t
= bits (arm_insn_r
->arm_insn
, 12, 15);
11983 reg_v
= bits (arm_insn_r
->arm_insn
, 21, 23);
11984 bits_a
= bits (arm_insn_r
->arm_insn
, 21, 23);
11985 bit_l
= bit (arm_insn_r
->arm_insn
, 20);
11986 bit_c
= bit (arm_insn_r
->arm_insn
, 8);
11988 /* Handle VMOV instruction. */
11989 if (bit_l
&& bit_c
)
11991 record_buf
[0] = reg_t
;
11992 arm_insn_r
->reg_rec_count
= 1;
11994 else if (bit_l
&& !bit_c
)
11996 /* Handle VMOV instruction. */
11997 if (bits_a
== 0x00)
11999 if (bit (arm_insn_r
->arm_insn
, 20))
12000 record_buf
[0] = reg_t
;
12002 record_buf
[0] = num_regs
+ (bit (arm_insn_r
->arm_insn
, 7) |
12005 arm_insn_r
->reg_rec_count
= 1;
12007 /* Handle VMRS instruction. */
12008 else if (bits_a
== 0x07)
12011 reg_t
= ARM_PS_REGNUM
;
12013 record_buf
[0] = reg_t
;
12014 arm_insn_r
->reg_rec_count
= 1;
12017 else if (!bit_l
&& !bit_c
)
12019 /* Handle VMOV instruction. */
12020 if (bits_a
== 0x00)
12022 if (bit (arm_insn_r
->arm_insn
, 20))
12023 record_buf
[0] = reg_t
;
12025 record_buf
[0] = num_regs
+ (bit (arm_insn_r
->arm_insn
, 7) |
12028 arm_insn_r
->reg_rec_count
= 1;
12030 /* Handle VMSR instruction. */
12031 else if (bits_a
== 0x07)
12033 record_buf
[0] = ARM_FPSCR_REGNUM
;
12034 arm_insn_r
->reg_rec_count
= 1;
12037 else if (!bit_l
&& bit_c
)
12039 /* Handle VMOV instruction. */
12040 if (!(bits_a
& 0x04))
12042 record_buf
[0] = (reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4))
12044 arm_insn_r
->reg_rec_count
= 1;
12046 /* Handle VDUP instruction. */
12049 if (bit (arm_insn_r
->arm_insn
, 21))
12051 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
12052 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
12053 record_buf
[1] = reg_v
+ ARM_D0_REGNUM
+ 1;
12054 arm_insn_r
->reg_rec_count
= 2;
12058 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
12059 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
12060 arm_insn_r
->reg_rec_count
= 1;
12065 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
12069 /* Record handler for extension register load/store instructions. */
12072 arm_record_exreg_ld_st_insn (insn_decode_record
*arm_insn_r
)
12074 uint32_t opcode
, single_reg
;
12075 uint8_t op_vldm_vstm
;
12076 uint32_t record_buf
[8], record_buf_mem
[128];
12077 ULONGEST u_regval
= 0;
12079 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
12080 const int num_regs
= gdbarch_num_regs (arm_insn_r
->gdbarch
);
12082 opcode
= bits (arm_insn_r
->arm_insn
, 20, 24);
12083 single_reg
= bit (arm_insn_r
->arm_insn
, 8);
12084 op_vldm_vstm
= opcode
& 0x1b;
12086 /* Handle VMOV instructions. */
12087 if ((opcode
& 0x1e) == 0x04)
12089 if (bit (arm_insn_r
->arm_insn
, 4))
12091 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
12092 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
12093 arm_insn_r
->reg_rec_count
= 2;
12097 uint8_t reg_m
= (bits (arm_insn_r
->arm_insn
, 0, 3) << 1)
12098 | bit (arm_insn_r
->arm_insn
, 5);
12102 record_buf
[0] = num_regs
+ reg_m
;
12103 record_buf
[1] = num_regs
+ reg_m
+ 1;
12104 arm_insn_r
->reg_rec_count
= 2;
12108 record_buf
[0] = reg_m
+ ARM_D0_REGNUM
;
12109 arm_insn_r
->reg_rec_count
= 1;
12113 /* Handle VSTM and VPUSH instructions. */
12114 else if (op_vldm_vstm
== 0x08 || op_vldm_vstm
== 0x0a
12115 || op_vldm_vstm
== 0x12)
12117 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
, memory_count
;
12118 uint32_t memory_index
= 0;
12120 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
12121 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12122 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
12123 imm_off32
= imm_off8
<< 24;
12124 memory_count
= imm_off8
;
12126 if (bit (arm_insn_r
->arm_insn
, 23))
12127 start_address
= u_regval
;
12129 start_address
= u_regval
- imm_off32
;
12131 if (bit (arm_insn_r
->arm_insn
, 21))
12133 record_buf
[0] = reg_rn
;
12134 arm_insn_r
->reg_rec_count
= 1;
12137 while (memory_count
> 0)
12141 record_buf_mem
[memory_index
] = start_address
;
12142 record_buf_mem
[memory_index
+ 1] = 4;
12143 start_address
= start_address
+ 4;
12144 memory_index
= memory_index
+ 2;
12148 record_buf_mem
[memory_index
] = start_address
;
12149 record_buf_mem
[memory_index
+ 1] = 4;
12150 record_buf_mem
[memory_index
+ 2] = start_address
+ 4;
12151 record_buf_mem
[memory_index
+ 3] = 4;
12152 start_address
= start_address
+ 8;
12153 memory_index
= memory_index
+ 4;
12157 arm_insn_r
->mem_rec_count
= (memory_index
>> 1);
12159 /* Handle VLDM instructions. */
12160 else if (op_vldm_vstm
== 0x09 || op_vldm_vstm
== 0x0b
12161 || op_vldm_vstm
== 0x13)
12163 uint32_t reg_count
, reg_vd
;
12164 uint32_t reg_index
= 0;
12166 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
12167 reg_count
= bits (arm_insn_r
->arm_insn
, 0, 7);
12170 reg_vd
= reg_vd
| (bit (arm_insn_r
->arm_insn
, 22) << 4);
12172 reg_vd
= (reg_vd
<< 1) | bit (arm_insn_r
->arm_insn
, 22);
12174 if (bit (arm_insn_r
->arm_insn
, 21))
12175 record_buf
[reg_index
++] = bits (arm_insn_r
->arm_insn
, 16, 19);
12177 while (reg_count
> 0)
12180 record_buf
[reg_index
++] = num_regs
+ reg_vd
+ reg_count
- 1;
12182 record_buf
[reg_index
++] = ARM_D0_REGNUM
+ reg_vd
+ reg_count
- 1;
12186 arm_insn_r
->reg_rec_count
= reg_index
;
12188 /* VSTR Vector store register. */
12189 else if ((opcode
& 0x13) == 0x10)
12191 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
, memory_count
;
12192 uint32_t memory_index
= 0;
12194 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
12195 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12196 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
12197 imm_off32
= imm_off8
<< 24;
12198 memory_count
= imm_off8
;
12200 if (bit (arm_insn_r
->arm_insn
, 23))
12201 start_address
= u_regval
+ imm_off32
;
12203 start_address
= u_regval
- imm_off32
;
12207 record_buf_mem
[memory_index
] = start_address
;
12208 record_buf_mem
[memory_index
+ 1] = 4;
12209 arm_insn_r
->mem_rec_count
= 1;
12213 record_buf_mem
[memory_index
] = start_address
;
12214 record_buf_mem
[memory_index
+ 1] = 4;
12215 record_buf_mem
[memory_index
+ 2] = start_address
+ 4;
12216 record_buf_mem
[memory_index
+ 3] = 4;
12217 arm_insn_r
->mem_rec_count
= 2;
12220 /* VLDR Vector load register. */
12221 else if ((opcode
& 0x13) == 0x11)
12223 uint32_t reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
12227 reg_vd
= reg_vd
| (bit (arm_insn_r
->arm_insn
, 22) << 4);
12228 record_buf
[0] = ARM_D0_REGNUM
+ reg_vd
;
12232 reg_vd
= (reg_vd
<< 1) | bit (arm_insn_r
->arm_insn
, 22);
12233 record_buf
[0] = num_regs
+ reg_vd
;
12235 arm_insn_r
->reg_rec_count
= 1;
12238 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
12239 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
12243 /* Record handler for arm/thumb mode VFP data processing instructions. */
12246 arm_record_vfp_data_proc_insn (insn_decode_record
*arm_insn_r
)
12248 uint32_t opc1
, opc2
, opc3
, dp_op_sz
, bit_d
, reg_vd
;
12249 uint32_t record_buf
[4];
12250 enum insn_types
{INSN_T0
, INSN_T1
, INSN_T2
, INSN_T3
, INSN_INV
};
12251 enum insn_types curr_insn_type
= INSN_INV
;
12253 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
12254 opc1
= bits (arm_insn_r
->arm_insn
, 20, 23);
12255 opc2
= bits (arm_insn_r
->arm_insn
, 16, 19);
12256 opc3
= bits (arm_insn_r
->arm_insn
, 6, 7);
12257 dp_op_sz
= bit (arm_insn_r
->arm_insn
, 8);
12258 bit_d
= bit (arm_insn_r
->arm_insn
, 22);
12259 opc1
= opc1
& 0x04;
12261 /* Handle VMLA, VMLS. */
12264 if (bit (arm_insn_r
->arm_insn
, 10))
12266 if (bit (arm_insn_r
->arm_insn
, 6))
12267 curr_insn_type
= INSN_T0
;
12269 curr_insn_type
= INSN_T1
;
12274 curr_insn_type
= INSN_T1
;
12276 curr_insn_type
= INSN_T2
;
12279 /* Handle VNMLA, VNMLS, VNMUL. */
12280 else if (opc1
== 0x01)
12283 curr_insn_type
= INSN_T1
;
12285 curr_insn_type
= INSN_T2
;
12288 else if (opc1
== 0x02 && !(opc3
& 0x01))
12290 if (bit (arm_insn_r
->arm_insn
, 10))
12292 if (bit (arm_insn_r
->arm_insn
, 6))
12293 curr_insn_type
= INSN_T0
;
12295 curr_insn_type
= INSN_T1
;
12300 curr_insn_type
= INSN_T1
;
12302 curr_insn_type
= INSN_T2
;
12305 /* Handle VADD, VSUB. */
12306 else if (opc1
== 0x03)
12308 if (!bit (arm_insn_r
->arm_insn
, 9))
12310 if (bit (arm_insn_r
->arm_insn
, 6))
12311 curr_insn_type
= INSN_T0
;
12313 curr_insn_type
= INSN_T1
;
12318 curr_insn_type
= INSN_T1
;
12320 curr_insn_type
= INSN_T2
;
12324 else if (opc1
== 0x0b)
12327 curr_insn_type
= INSN_T1
;
12329 curr_insn_type
= INSN_T2
;
12331 /* Handle all other vfp data processing instructions. */
12332 else if (opc1
== 0x0b)
12335 if (!(opc3
& 0x01) || (opc2
== 0x00 && opc3
== 0x01))
12337 if (bit (arm_insn_r
->arm_insn
, 4))
12339 if (bit (arm_insn_r
->arm_insn
, 6))
12340 curr_insn_type
= INSN_T0
;
12342 curr_insn_type
= INSN_T1
;
12347 curr_insn_type
= INSN_T1
;
12349 curr_insn_type
= INSN_T2
;
12352 /* Handle VNEG and VABS. */
12353 else if ((opc2
== 0x01 && opc3
== 0x01)
12354 || (opc2
== 0x00 && opc3
== 0x03))
12356 if (!bit (arm_insn_r
->arm_insn
, 11))
12358 if (bit (arm_insn_r
->arm_insn
, 6))
12359 curr_insn_type
= INSN_T0
;
12361 curr_insn_type
= INSN_T1
;
12366 curr_insn_type
= INSN_T1
;
12368 curr_insn_type
= INSN_T2
;
12371 /* Handle VSQRT. */
12372 else if (opc2
== 0x01 && opc3
== 0x03)
12375 curr_insn_type
= INSN_T1
;
12377 curr_insn_type
= INSN_T2
;
12380 else if (opc2
== 0x07 && opc3
== 0x03)
12383 curr_insn_type
= INSN_T1
;
12385 curr_insn_type
= INSN_T2
;
12387 else if (opc3
& 0x01)
12390 if ((opc2
== 0x08) || (opc2
& 0x0e) == 0x0c)
12392 if (!bit (arm_insn_r
->arm_insn
, 18))
12393 curr_insn_type
= INSN_T2
;
12397 curr_insn_type
= INSN_T1
;
12399 curr_insn_type
= INSN_T2
;
12403 else if ((opc2
& 0x0e) == 0x0a || (opc2
& 0x0e) == 0x0e)
12406 curr_insn_type
= INSN_T1
;
12408 curr_insn_type
= INSN_T2
;
12410 /* Handle VCVTB, VCVTT. */
12411 else if ((opc2
& 0x0e) == 0x02)
12412 curr_insn_type
= INSN_T2
;
12413 /* Handle VCMP, VCMPE. */
12414 else if ((opc2
& 0x0e) == 0x04)
12415 curr_insn_type
= INSN_T3
;
12419 switch (curr_insn_type
)
12422 reg_vd
= reg_vd
| (bit_d
<< 4);
12423 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
12424 record_buf
[1] = reg_vd
+ ARM_D0_REGNUM
+ 1;
12425 arm_insn_r
->reg_rec_count
= 2;
12429 reg_vd
= reg_vd
| (bit_d
<< 4);
12430 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
12431 arm_insn_r
->reg_rec_count
= 1;
12435 reg_vd
= (reg_vd
<< 1) | bit_d
;
12436 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
12437 arm_insn_r
->reg_rec_count
= 1;
12441 record_buf
[0] = ARM_FPSCR_REGNUM
;
12442 arm_insn_r
->reg_rec_count
= 1;
12446 gdb_assert_not_reached ("no decoding pattern found");
12450 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
12454 /* Handling opcode 110 insns. */
12457 arm_record_asimd_vfp_coproc (insn_decode_record
*arm_insn_r
)
12459 uint32_t op
, op1
, op1_sbit
, op1_ebit
, coproc
;
12461 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
12462 op1
= bits (arm_insn_r
->arm_insn
, 20, 25);
12463 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
12465 if ((coproc
& 0x0e) == 0x0a)
12467 /* Handle extension register ld/st instructions. */
12469 return arm_record_exreg_ld_st_insn (arm_insn_r
);
12471 /* 64-bit transfers between arm core and extension registers. */
12472 if ((op1
& 0x3e) == 0x04)
12473 return arm_record_exreg_ld_st_insn (arm_insn_r
);
12477 /* Handle coprocessor ld/st instructions. */
12482 return arm_record_unsupported_insn (arm_insn_r
);
12485 return arm_record_unsupported_insn (arm_insn_r
);
12488 /* Move to coprocessor from two arm core registers. */
12490 return arm_record_unsupported_insn (arm_insn_r
);
12492 /* Move to two arm core registers from coprocessor. */
12497 reg_t
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
12498 reg_t
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
12499 arm_insn_r
->reg_rec_count
= 2;
12501 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, reg_t
);
12505 return arm_record_unsupported_insn (arm_insn_r
);
12508 /* Handling opcode 111 insns. */
12511 arm_record_coproc_data_proc (insn_decode_record
*arm_insn_r
)
12513 uint32_t op
, op1_sbit
, op1_ebit
, coproc
;
12514 struct gdbarch_tdep
*tdep
= gdbarch_tdep (arm_insn_r
->gdbarch
);
12515 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
12516 ULONGEST u_regval
= 0;
12518 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 24, 27);
12519 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
12520 op1_sbit
= bit (arm_insn_r
->arm_insn
, 24);
12521 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
12522 op
= bit (arm_insn_r
->arm_insn
, 4);
12524 /* Handle arm SWI/SVC system call instructions. */
12527 if (tdep
->arm_syscall_record
!= NULL
)
12529 ULONGEST svc_operand
, svc_number
;
12531 svc_operand
= (0x00ffffff & arm_insn_r
->arm_insn
);
12533 if (svc_operand
) /* OABI. */
12534 svc_number
= svc_operand
- 0x900000;
12536 regcache_raw_read_unsigned (reg_cache
, 7, &svc_number
);
12538 return tdep
->arm_syscall_record (reg_cache
, svc_number
);
12542 printf_unfiltered (_("no syscall record support\n"));
12547 if ((coproc
& 0x0e) == 0x0a)
12549 /* VFP data-processing instructions. */
12550 if (!op1_sbit
&& !op
)
12551 return arm_record_vfp_data_proc_insn (arm_insn_r
);
12553 /* Advanced SIMD, VFP instructions. */
12554 if (!op1_sbit
&& op
)
12555 return arm_record_vdata_transfer_insn (arm_insn_r
);
12559 /* Coprocessor data operations. */
12560 if (!op1_sbit
&& !op
)
12561 return arm_record_unsupported_insn (arm_insn_r
);
12563 /* Move to Coprocessor from ARM core register. */
12564 if (!op1_sbit
&& !op1_ebit
&& op
)
12565 return arm_record_unsupported_insn (arm_insn_r
);
12567 /* Move to arm core register from coprocessor. */
12568 if (!op1_sbit
&& op1_ebit
&& op
)
12570 uint32_t record_buf
[1];
12572 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
12573 if (record_buf
[0] == 15)
12574 record_buf
[0] = ARM_PS_REGNUM
;
12576 arm_insn_r
->reg_rec_count
= 1;
12577 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
,
12583 return arm_record_unsupported_insn (arm_insn_r
);
12586 /* Handling opcode 000 insns. */
12589 thumb_record_shift_add_sub (insn_decode_record
*thumb_insn_r
)
12591 uint32_t record_buf
[8];
12592 uint32_t reg_src1
= 0;
12594 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12596 record_buf
[0] = ARM_PS_REGNUM
;
12597 record_buf
[1] = reg_src1
;
12598 thumb_insn_r
->reg_rec_count
= 2;
12600 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12606 /* Handling opcode 001 insns. */
12609 thumb_record_add_sub_cmp_mov (insn_decode_record
*thumb_insn_r
)
12611 uint32_t record_buf
[8];
12612 uint32_t reg_src1
= 0;
12614 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12616 record_buf
[0] = ARM_PS_REGNUM
;
12617 record_buf
[1] = reg_src1
;
12618 thumb_insn_r
->reg_rec_count
= 2;
12620 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12625 /* Handling opcode 010 insns. */
12628 thumb_record_ld_st_reg_offset (insn_decode_record
*thumb_insn_r
)
12630 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12631 uint32_t record_buf
[8], record_buf_mem
[8];
12633 uint32_t reg_src1
= 0, reg_src2
= 0;
12634 uint32_t opcode1
= 0, opcode2
= 0, opcode3
= 0;
12636 ULONGEST u_regval
[2] = {0};
12638 opcode1
= bits (thumb_insn_r
->arm_insn
, 10, 12);
12640 if (bit (thumb_insn_r
->arm_insn
, 12))
12642 /* Handle load/store register offset. */
12643 opcode2
= bits (thumb_insn_r
->arm_insn
, 9, 10);
12644 if (opcode2
>= 12 && opcode2
<= 15)
12646 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12647 reg_src1
= bits (thumb_insn_r
->arm_insn
,0, 2);
12648 record_buf
[0] = reg_src1
;
12649 thumb_insn_r
->reg_rec_count
= 1;
12651 else if (opcode2
>= 8 && opcode2
<= 10)
12653 /* STR(2), STRB(2), STRH(2) . */
12654 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12655 reg_src2
= bits (thumb_insn_r
->arm_insn
, 6, 8);
12656 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
12657 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
12659 record_buf_mem
[0] = 4; /* STR (2). */
12660 else if (10 == opcode2
)
12661 record_buf_mem
[0] = 1; /* STRB (2). */
12662 else if (9 == opcode2
)
12663 record_buf_mem
[0] = 2; /* STRH (2). */
12664 record_buf_mem
[1] = u_regval
[0] + u_regval
[1];
12665 thumb_insn_r
->mem_rec_count
= 1;
12668 else if (bit (thumb_insn_r
->arm_insn
, 11))
12670 /* Handle load from literal pool. */
12672 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12673 record_buf
[0] = reg_src1
;
12674 thumb_insn_r
->reg_rec_count
= 1;
12678 opcode2
= bits (thumb_insn_r
->arm_insn
, 8, 9);
12679 opcode3
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12680 if ((3 == opcode2
) && (!opcode3
))
12682 /* Branch with exchange. */
12683 record_buf
[0] = ARM_PS_REGNUM
;
12684 thumb_insn_r
->reg_rec_count
= 1;
12688 /* Format 8; special data processing insns. */
12689 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12690 record_buf
[0] = ARM_PS_REGNUM
;
12691 record_buf
[1] = reg_src1
;
12692 thumb_insn_r
->reg_rec_count
= 2;
12697 /* Format 5; data processing insns. */
12698 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12699 if (bit (thumb_insn_r
->arm_insn
, 7))
12701 reg_src1
= reg_src1
+ 8;
12703 record_buf
[0] = ARM_PS_REGNUM
;
12704 record_buf
[1] = reg_src1
;
12705 thumb_insn_r
->reg_rec_count
= 2;
12708 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12709 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12715 /* Handling opcode 001 insns. */
12718 thumb_record_ld_st_imm_offset (insn_decode_record
*thumb_insn_r
)
12720 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12721 uint32_t record_buf
[8], record_buf_mem
[8];
12723 uint32_t reg_src1
= 0;
12724 uint32_t opcode
= 0, immed_5
= 0;
12726 ULONGEST u_regval
= 0;
12728 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12733 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12734 record_buf
[0] = reg_src1
;
12735 thumb_insn_r
->reg_rec_count
= 1;
12740 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12741 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
12742 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12743 record_buf_mem
[0] = 4;
12744 record_buf_mem
[1] = u_regval
+ (immed_5
* 4);
12745 thumb_insn_r
->mem_rec_count
= 1;
12748 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12749 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12755 /* Handling opcode 100 insns. */
12758 thumb_record_ld_st_stack (insn_decode_record
*thumb_insn_r
)
12760 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12761 uint32_t record_buf
[8], record_buf_mem
[8];
12763 uint32_t reg_src1
= 0;
12764 uint32_t opcode
= 0, immed_8
= 0, immed_5
= 0;
12766 ULONGEST u_regval
= 0;
12768 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12773 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12774 record_buf
[0] = reg_src1
;
12775 thumb_insn_r
->reg_rec_count
= 1;
12777 else if (1 == opcode
)
12780 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12781 record_buf
[0] = reg_src1
;
12782 thumb_insn_r
->reg_rec_count
= 1;
12784 else if (2 == opcode
)
12787 immed_8
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12788 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
12789 record_buf_mem
[0] = 4;
12790 record_buf_mem
[1] = u_regval
+ (immed_8
* 4);
12791 thumb_insn_r
->mem_rec_count
= 1;
12793 else if (0 == opcode
)
12796 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
12797 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12798 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12799 record_buf_mem
[0] = 2;
12800 record_buf_mem
[1] = u_regval
+ (immed_5
* 2);
12801 thumb_insn_r
->mem_rec_count
= 1;
12804 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12805 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12811 /* Handling opcode 101 insns. */
12814 thumb_record_misc (insn_decode_record
*thumb_insn_r
)
12816 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12818 uint32_t opcode
= 0, opcode1
= 0, opcode2
= 0;
12819 uint32_t register_bits
= 0, register_count
= 0;
12820 uint32_t register_list
[8] = {0}, index
= 0, start_address
= 0;
12821 uint32_t record_buf
[24], record_buf_mem
[48];
12824 ULONGEST u_regval
= 0;
12826 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12827 opcode1
= bits (thumb_insn_r
->arm_insn
, 8, 12);
12828 opcode2
= bits (thumb_insn_r
->arm_insn
, 9, 12);
12833 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12834 while (register_bits
)
12836 if (register_bits
& 0x00000001)
12837 record_buf
[index
++] = register_count
;
12838 register_bits
= register_bits
>> 1;
12841 record_buf
[index
++] = ARM_PS_REGNUM
;
12842 record_buf
[index
++] = ARM_SP_REGNUM
;
12843 thumb_insn_r
->reg_rec_count
= index
;
12845 else if (10 == opcode2
)
12848 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12849 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
12850 while (register_bits
)
12852 if (register_bits
& 0x00000001)
12854 register_bits
= register_bits
>> 1;
12856 start_address
= u_regval
- \
12857 (4 * (bit (thumb_insn_r
->arm_insn
, 8) + register_count
));
12858 thumb_insn_r
->mem_rec_count
= register_count
;
12859 while (register_count
)
12861 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12862 record_buf_mem
[(register_count
* 2) - 2] = 4;
12863 start_address
= start_address
+ 4;
12866 record_buf
[0] = ARM_SP_REGNUM
;
12867 thumb_insn_r
->reg_rec_count
= 1;
12869 else if (0x1E == opcode1
)
12872 /* Handle enhanced software breakpoint insn, BKPT. */
12873 /* CPSR is changed to be executed in ARM state, disabling normal
12874 interrupts, entering abort mode. */
12875 /* According to high vector configuration PC is set. */
12876 /* User hits breakpoint and type reverse, in that case, we need to go back with
12877 previous CPSR and Program Counter. */
12878 record_buf
[0] = ARM_PS_REGNUM
;
12879 record_buf
[1] = ARM_LR_REGNUM
;
12880 thumb_insn_r
->reg_rec_count
= 2;
12881 /* We need to save SPSR value, which is not yet done. */
12882 printf_unfiltered (_("Process record does not support instruction "
12883 "0x%0x at address %s.\n"),
12884 thumb_insn_r
->arm_insn
,
12885 paddress (thumb_insn_r
->gdbarch
,
12886 thumb_insn_r
->this_addr
));
12889 else if ((0 == opcode
) || (1 == opcode
))
12891 /* ADD(5), ADD(6). */
12892 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12893 record_buf
[0] = reg_src1
;
12894 thumb_insn_r
->reg_rec_count
= 1;
12896 else if (2 == opcode
)
12898 /* ADD(7), SUB(4). */
12899 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12900 record_buf
[0] = ARM_SP_REGNUM
;
12901 thumb_insn_r
->reg_rec_count
= 1;
12904 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12905 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12911 /* Handling opcode 110 insns. */
12914 thumb_record_ldm_stm_swi (insn_decode_record
*thumb_insn_r
)
12916 struct gdbarch_tdep
*tdep
= gdbarch_tdep (thumb_insn_r
->gdbarch
);
12917 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12919 uint32_t ret
= 0; /* function return value: -1:record failure ; 0:success */
12920 uint32_t reg_src1
= 0;
12921 uint32_t opcode1
= 0, opcode2
= 0, register_bits
= 0, register_count
= 0;
12922 uint32_t register_list
[8] = {0}, index
= 0, start_address
= 0;
12923 uint32_t record_buf
[24], record_buf_mem
[48];
12925 ULONGEST u_regval
= 0;
12927 opcode1
= bits (thumb_insn_r
->arm_insn
, 8, 12);
12928 opcode2
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12934 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12936 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12937 while (register_bits
)
12939 if (register_bits
& 0x00000001)
12940 record_buf
[index
++] = register_count
;
12941 register_bits
= register_bits
>> 1;
12944 record_buf
[index
++] = reg_src1
;
12945 thumb_insn_r
->reg_rec_count
= index
;
12947 else if (0 == opcode2
)
12949 /* It handles both STMIA. */
12950 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12952 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12953 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12954 while (register_bits
)
12956 if (register_bits
& 0x00000001)
12958 register_bits
= register_bits
>> 1;
12960 start_address
= u_regval
;
12961 thumb_insn_r
->mem_rec_count
= register_count
;
12962 while (register_count
)
12964 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12965 record_buf_mem
[(register_count
* 2) - 2] = 4;
12966 start_address
= start_address
+ 4;
12970 else if (0x1F == opcode1
)
12972 /* Handle arm syscall insn. */
12973 if (tdep
->arm_syscall_record
!= NULL
)
12975 regcache_raw_read_unsigned (reg_cache
, 7, &u_regval
);
12976 ret
= tdep
->arm_syscall_record (reg_cache
, u_regval
);
12980 printf_unfiltered (_("no syscall record support\n"));
12985 /* B (1), conditional branch is automatically taken care in process_record,
12986 as PC is saved there. */
12988 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12989 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12995 /* Handling opcode 111 insns. */
12998 thumb_record_branch (insn_decode_record
*thumb_insn_r
)
13000 uint32_t record_buf
[8];
13001 uint32_t bits_h
= 0;
13003 bits_h
= bits (thumb_insn_r
->arm_insn
, 11, 12);
13005 if (2 == bits_h
|| 3 == bits_h
)
13008 record_buf
[0] = ARM_LR_REGNUM
;
13009 thumb_insn_r
->reg_rec_count
= 1;
13011 else if (1 == bits_h
)
13014 record_buf
[0] = ARM_PS_REGNUM
;
13015 record_buf
[1] = ARM_LR_REGNUM
;
13016 thumb_insn_r
->reg_rec_count
= 2;
13019 /* B(2) is automatically taken care in process_record, as PC is
13022 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
13027 /* Handler for thumb2 load/store multiple instructions. */
13030 thumb2_record_ld_st_multiple (insn_decode_record
*thumb2_insn_r
)
13032 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13034 uint32_t reg_rn
, op
;
13035 uint32_t register_bits
= 0, register_count
= 0;
13036 uint32_t index
= 0, start_address
= 0;
13037 uint32_t record_buf
[24], record_buf_mem
[48];
13039 ULONGEST u_regval
= 0;
13041 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13042 op
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
13044 if (0 == op
|| 3 == op
)
13046 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
13048 /* Handle RFE instruction. */
13049 record_buf
[0] = ARM_PS_REGNUM
;
13050 thumb2_insn_r
->reg_rec_count
= 1;
13054 /* Handle SRS instruction after reading banked SP. */
13055 return arm_record_unsupported_insn (thumb2_insn_r
);
13058 else if (1 == op
|| 2 == op
)
13060 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
13062 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13063 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
13064 while (register_bits
)
13066 if (register_bits
& 0x00000001)
13067 record_buf
[index
++] = register_count
;
13070 register_bits
= register_bits
>> 1;
13072 record_buf
[index
++] = reg_rn
;
13073 record_buf
[index
++] = ARM_PS_REGNUM
;
13074 thumb2_insn_r
->reg_rec_count
= index
;
13078 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13079 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
13080 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
13081 while (register_bits
)
13083 if (register_bits
& 0x00000001)
13086 register_bits
= register_bits
>> 1;
13091 /* Start address calculation for LDMDB/LDMEA. */
13092 start_address
= u_regval
;
13096 /* Start address calculation for LDMDB/LDMEA. */
13097 start_address
= u_regval
- register_count
* 4;
13100 thumb2_insn_r
->mem_rec_count
= register_count
;
13101 while (register_count
)
13103 record_buf_mem
[register_count
* 2 - 1] = start_address
;
13104 record_buf_mem
[register_count
* 2 - 2] = 4;
13105 start_address
= start_address
+ 4;
13108 record_buf
[0] = reg_rn
;
13109 record_buf
[1] = ARM_PS_REGNUM
;
13110 thumb2_insn_r
->reg_rec_count
= 2;
13114 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13116 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13118 return ARM_RECORD_SUCCESS
;
13121 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13125 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record
*thumb2_insn_r
)
13127 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13129 uint32_t reg_rd
, reg_rn
, offset_imm
;
13130 uint32_t reg_dest1
, reg_dest2
;
13131 uint32_t address
, offset_addr
;
13132 uint32_t record_buf
[8], record_buf_mem
[8];
13133 uint32_t op1
, op2
, op3
;
13136 ULONGEST u_regval
[2];
13138 op1
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
13139 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 21);
13140 op3
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
13142 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
13144 if(!(1 == op1
&& 1 == op2
&& (0 == op3
|| 1 == op3
)))
13146 reg_dest1
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
13147 record_buf
[0] = reg_dest1
;
13148 record_buf
[1] = ARM_PS_REGNUM
;
13149 thumb2_insn_r
->reg_rec_count
= 2;
13152 if (3 == op2
|| (op1
& 2) || (1 == op1
&& 1 == op2
&& 7 == op3
))
13154 reg_dest2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13155 record_buf
[2] = reg_dest2
;
13156 thumb2_insn_r
->reg_rec_count
= 3;
13161 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13162 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
13164 if (0 == op1
&& 0 == op2
)
13166 /* Handle STREX. */
13167 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
13168 address
= u_regval
[0] + (offset_imm
* 4);
13169 record_buf_mem
[0] = 4;
13170 record_buf_mem
[1] = address
;
13171 thumb2_insn_r
->mem_rec_count
= 1;
13172 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
13173 record_buf
[0] = reg_rd
;
13174 thumb2_insn_r
->reg_rec_count
= 1;
13176 else if (1 == op1
&& 0 == op2
)
13178 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
13179 record_buf
[0] = reg_rd
;
13180 thumb2_insn_r
->reg_rec_count
= 1;
13181 address
= u_regval
[0];
13182 record_buf_mem
[1] = address
;
13186 /* Handle STREXB. */
13187 record_buf_mem
[0] = 1;
13188 thumb2_insn_r
->mem_rec_count
= 1;
13192 /* Handle STREXH. */
13193 record_buf_mem
[0] = 2 ;
13194 thumb2_insn_r
->mem_rec_count
= 1;
13198 /* Handle STREXD. */
13199 address
= u_regval
[0];
13200 record_buf_mem
[0] = 4;
13201 record_buf_mem
[2] = 4;
13202 record_buf_mem
[3] = address
+ 4;
13203 thumb2_insn_r
->mem_rec_count
= 2;
13208 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
13210 if (bit (thumb2_insn_r
->arm_insn
, 24))
13212 if (bit (thumb2_insn_r
->arm_insn
, 23))
13213 offset_addr
= u_regval
[0] + (offset_imm
* 4);
13215 offset_addr
= u_regval
[0] - (offset_imm
* 4);
13217 address
= offset_addr
;
13220 address
= u_regval
[0];
13222 record_buf_mem
[0] = 4;
13223 record_buf_mem
[1] = address
;
13224 record_buf_mem
[2] = 4;
13225 record_buf_mem
[3] = address
+ 4;
13226 thumb2_insn_r
->mem_rec_count
= 2;
13227 record_buf
[0] = reg_rn
;
13228 thumb2_insn_r
->reg_rec_count
= 1;
13232 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13234 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13236 return ARM_RECORD_SUCCESS
;
13239 /* Handler for thumb2 data processing (shift register and modified immediate)
13243 thumb2_record_data_proc_sreg_mimm (insn_decode_record
*thumb2_insn_r
)
13245 uint32_t reg_rd
, op
;
13246 uint32_t record_buf
[8];
13248 op
= bits (thumb2_insn_r
->arm_insn
, 21, 24);
13249 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13251 if ((0 == op
|| 4 == op
|| 8 == op
|| 13 == op
) && 15 == reg_rd
)
13253 record_buf
[0] = ARM_PS_REGNUM
;
13254 thumb2_insn_r
->reg_rec_count
= 1;
13258 record_buf
[0] = reg_rd
;
13259 record_buf
[1] = ARM_PS_REGNUM
;
13260 thumb2_insn_r
->reg_rec_count
= 2;
13263 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13265 return ARM_RECORD_SUCCESS
;
13268 /* Generic handler for thumb2 instructions which effect destination and PS
13272 thumb2_record_ps_dest_generic (insn_decode_record
*thumb2_insn_r
)
13275 uint32_t record_buf
[8];
13277 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13279 record_buf
[0] = reg_rd
;
13280 record_buf
[1] = ARM_PS_REGNUM
;
13281 thumb2_insn_r
->reg_rec_count
= 2;
13283 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13285 return ARM_RECORD_SUCCESS
;
13288 /* Handler for thumb2 branch and miscellaneous control instructions. */
13291 thumb2_record_branch_misc_cntrl (insn_decode_record
*thumb2_insn_r
)
13293 uint32_t op
, op1
, op2
;
13294 uint32_t record_buf
[8];
13296 op
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
13297 op1
= bits (thumb2_insn_r
->arm_insn
, 12, 14);
13298 op2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13300 /* Handle MSR insn. */
13301 if (!(op1
& 0x2) && 0x38 == op
)
13305 /* CPSR is going to be changed. */
13306 record_buf
[0] = ARM_PS_REGNUM
;
13307 thumb2_insn_r
->reg_rec_count
= 1;
13311 arm_record_unsupported_insn(thumb2_insn_r
);
13315 else if (4 == (op1
& 0x5) || 5 == (op1
& 0x5))
13318 record_buf
[0] = ARM_PS_REGNUM
;
13319 record_buf
[1] = ARM_LR_REGNUM
;
13320 thumb2_insn_r
->reg_rec_count
= 2;
13323 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13325 return ARM_RECORD_SUCCESS
;
13328 /* Handler for thumb2 store single data item instructions. */
13331 thumb2_record_str_single_data (insn_decode_record
*thumb2_insn_r
)
13333 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13335 uint32_t reg_rn
, reg_rm
, offset_imm
, shift_imm
;
13336 uint32_t address
, offset_addr
;
13337 uint32_t record_buf
[8], record_buf_mem
[8];
13340 ULONGEST u_regval
[2];
13342 op1
= bits (thumb2_insn_r
->arm_insn
, 21, 23);
13343 op2
= bits (thumb2_insn_r
->arm_insn
, 6, 11);
13344 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13345 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
13347 if (bit (thumb2_insn_r
->arm_insn
, 23))
13350 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 11);
13351 offset_addr
= u_regval
[0] + offset_imm
;
13352 address
= offset_addr
;
13357 if ((0 == op1
|| 1 == op1
|| 2 == op1
) && !(op2
& 0x20))
13359 /* Handle STRB (register). */
13360 reg_rm
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
13361 regcache_raw_read_unsigned (reg_cache
, reg_rm
, &u_regval
[1]);
13362 shift_imm
= bits (thumb2_insn_r
->arm_insn
, 4, 5);
13363 offset_addr
= u_regval
[1] << shift_imm
;
13364 address
= u_regval
[0] + offset_addr
;
13368 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
13369 if (bit (thumb2_insn_r
->arm_insn
, 10))
13371 if (bit (thumb2_insn_r
->arm_insn
, 9))
13372 offset_addr
= u_regval
[0] + offset_imm
;
13374 offset_addr
= u_regval
[0] - offset_imm
;
13376 address
= offset_addr
;
13379 address
= u_regval
[0];
13385 /* Store byte instructions. */
13388 record_buf_mem
[0] = 1;
13390 /* Store half word instructions. */
13393 record_buf_mem
[0] = 2;
13395 /* Store word instructions. */
13398 record_buf_mem
[0] = 4;
13402 gdb_assert_not_reached ("no decoding pattern found");
13406 record_buf_mem
[1] = address
;
13407 thumb2_insn_r
->mem_rec_count
= 1;
13408 record_buf
[0] = reg_rn
;
13409 thumb2_insn_r
->reg_rec_count
= 1;
13411 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13413 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13415 return ARM_RECORD_SUCCESS
;
13418 /* Handler for thumb2 load memory hints instructions. */
13421 thumb2_record_ld_mem_hints (insn_decode_record
*thumb2_insn_r
)
13423 uint32_t record_buf
[8];
13424 uint32_t reg_rt
, reg_rn
;
13426 reg_rt
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
13427 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13429 if (ARM_PC_REGNUM
!= reg_rt
)
13431 record_buf
[0] = reg_rt
;
13432 record_buf
[1] = reg_rn
;
13433 record_buf
[2] = ARM_PS_REGNUM
;
13434 thumb2_insn_r
->reg_rec_count
= 3;
13436 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13438 return ARM_RECORD_SUCCESS
;
13441 return ARM_RECORD_FAILURE
;
13444 /* Handler for thumb2 load word instructions. */
13447 thumb2_record_ld_word (insn_decode_record
*thumb2_insn_r
)
13449 uint32_t opcode1
= 0, opcode2
= 0;
13450 uint32_t record_buf
[8];
13452 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
13453 record_buf
[1] = ARM_PS_REGNUM
;
13454 thumb2_insn_r
->reg_rec_count
= 2;
13456 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13458 return ARM_RECORD_SUCCESS
;
13461 /* Handler for thumb2 long multiply, long multiply accumulate, and
13462 divide instructions. */
13465 thumb2_record_lmul_lmla_div (insn_decode_record
*thumb2_insn_r
)
13467 uint32_t opcode1
= 0, opcode2
= 0;
13468 uint32_t record_buf
[8];
13469 uint32_t reg_src1
= 0;
13471 opcode1
= bits (thumb2_insn_r
->arm_insn
, 20, 22);
13472 opcode2
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
13474 if (0 == opcode1
|| 2 == opcode1
|| (opcode1
>= 4 && opcode1
<= 6))
13476 /* Handle SMULL, UMULL, SMULAL. */
13477 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
13478 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
13479 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
13480 record_buf
[2] = ARM_PS_REGNUM
;
13481 thumb2_insn_r
->reg_rec_count
= 3;
13483 else if (1 == opcode1
|| 3 == opcode2
)
13485 /* Handle SDIV and UDIV. */
13486 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
13487 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
13488 record_buf
[2] = ARM_PS_REGNUM
;
13489 thumb2_insn_r
->reg_rec_count
= 3;
13492 return ARM_RECORD_FAILURE
;
13494 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13496 return ARM_RECORD_SUCCESS
;
13499 /* Record handler for thumb32 coprocessor instructions. */
13502 thumb2_record_coproc_insn (insn_decode_record
*thumb2_insn_r
)
13504 if (bit (thumb2_insn_r
->arm_insn
, 25))
13505 return arm_record_coproc_data_proc (thumb2_insn_r
);
13507 return arm_record_asimd_vfp_coproc (thumb2_insn_r
);
13510 /* Record handler for advance SIMD structure load/store instructions. */
13513 thumb2_record_asimd_struct_ld_st (insn_decode_record
*thumb2_insn_r
)
13515 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13516 uint32_t l_bit
, a_bit
, b_bits
;
13517 uint32_t record_buf
[128], record_buf_mem
[128];
13518 uint32_t reg_rn
, reg_vd
, address
, f_esize
, f_elem
;
13519 uint32_t index_r
= 0, index_e
= 0, bf_regs
= 0, index_m
= 0, loop_t
= 0;
13522 l_bit
= bit (thumb2_insn_r
->arm_insn
, 21);
13523 a_bit
= bit (thumb2_insn_r
->arm_insn
, 23);
13524 b_bits
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13525 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13526 reg_vd
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
13527 reg_vd
= (bit (thumb2_insn_r
->arm_insn
, 22) << 4) | reg_vd
;
13528 f_ebytes
= (1 << bits (thumb2_insn_r
->arm_insn
, 6, 7));
13529 f_esize
= 8 * f_ebytes
;
13530 f_elem
= 8 / f_ebytes
;
13534 ULONGEST u_regval
= 0;
13535 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
13536 address
= u_regval
;
13541 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
13543 if (b_bits
== 0x07)
13545 else if (b_bits
== 0x0a)
13547 else if (b_bits
== 0x06)
13549 else if (b_bits
== 0x02)
13554 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
13556 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13558 record_buf_mem
[index_m
++] = f_ebytes
;
13559 record_buf_mem
[index_m
++] = address
;
13560 address
= address
+ f_ebytes
;
13561 thumb2_insn_r
->mem_rec_count
+= 1;
13566 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
13568 if (b_bits
== 0x09 || b_bits
== 0x08)
13570 else if (b_bits
== 0x03)
13575 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
13576 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13578 for (loop_t
= 0; loop_t
< 2; loop_t
++)
13580 record_buf_mem
[index_m
++] = f_ebytes
;
13581 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13582 thumb2_insn_r
->mem_rec_count
+= 1;
13584 address
= address
+ (2 * f_ebytes
);
13588 else if ((b_bits
& 0x0e) == 0x04)
13590 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13592 for (loop_t
= 0; loop_t
< 3; loop_t
++)
13594 record_buf_mem
[index_m
++] = f_ebytes
;
13595 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13596 thumb2_insn_r
->mem_rec_count
+= 1;
13598 address
= address
+ (3 * f_ebytes
);
13602 else if (!(b_bits
& 0x0e))
13604 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13606 for (loop_t
= 0; loop_t
< 4; loop_t
++)
13608 record_buf_mem
[index_m
++] = f_ebytes
;
13609 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13610 thumb2_insn_r
->mem_rec_count
+= 1;
13612 address
= address
+ (4 * f_ebytes
);
13618 uint8_t bft_size
= bits (thumb2_insn_r
->arm_insn
, 10, 11);
13620 if (bft_size
== 0x00)
13622 else if (bft_size
== 0x01)
13624 else if (bft_size
== 0x02)
13630 if (!(b_bits
& 0x0b) || b_bits
== 0x08)
13631 thumb2_insn_r
->mem_rec_count
= 1;
13633 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09)
13634 thumb2_insn_r
->mem_rec_count
= 2;
13636 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a)
13637 thumb2_insn_r
->mem_rec_count
= 3;
13639 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b)
13640 thumb2_insn_r
->mem_rec_count
= 4;
13642 for (index_m
= 0; index_m
< thumb2_insn_r
->mem_rec_count
; index_m
++)
13644 record_buf_mem
[index_m
] = f_ebytes
;
13645 record_buf_mem
[index_m
] = address
+ (index_m
* f_ebytes
);
13654 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
13655 thumb2_insn_r
->reg_rec_count
= 1;
13657 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
13658 thumb2_insn_r
->reg_rec_count
= 2;
13660 else if ((b_bits
& 0x0e) == 0x04)
13661 thumb2_insn_r
->reg_rec_count
= 3;
13663 else if (!(b_bits
& 0x0e))
13664 thumb2_insn_r
->reg_rec_count
= 4;
13669 if (!(b_bits
& 0x0b) || b_bits
== 0x08 || b_bits
== 0x0c)
13670 thumb2_insn_r
->reg_rec_count
= 1;
13672 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09 || b_bits
== 0x0d)
13673 thumb2_insn_r
->reg_rec_count
= 2;
13675 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a || b_bits
== 0x0e)
13676 thumb2_insn_r
->reg_rec_count
= 3;
13678 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b || b_bits
== 0x0f)
13679 thumb2_insn_r
->reg_rec_count
= 4;
13681 for (index_r
= 0; index_r
< thumb2_insn_r
->reg_rec_count
; index_r
++)
13682 record_buf
[index_r
] = reg_vd
+ ARM_D0_REGNUM
+ index_r
;
13686 if (bits (thumb2_insn_r
->arm_insn
, 0, 3) != 15)
13688 record_buf
[index_r
] = reg_rn
;
13689 thumb2_insn_r
->reg_rec_count
+= 1;
13692 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13694 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13699 /* Decodes thumb2 instruction type and invokes its record handler. */
13701 static unsigned int
13702 thumb2_record_decode_insn_handler (insn_decode_record
*thumb2_insn_r
)
13704 uint32_t op
, op1
, op2
;
13706 op
= bit (thumb2_insn_r
->arm_insn
, 15);
13707 op1
= bits (thumb2_insn_r
->arm_insn
, 27, 28);
13708 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
13712 if (!(op2
& 0x64 ))
13714 /* Load/store multiple instruction. */
13715 return thumb2_record_ld_st_multiple (thumb2_insn_r
);
13717 else if (!((op2
& 0x64) ^ 0x04))
13719 /* Load/store (dual/exclusive) and table branch instruction. */
13720 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r
);
13722 else if (!((op2
& 0x20) ^ 0x20))
13724 /* Data-processing (shifted register). */
13725 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
13727 else if (op2
& 0x40)
13729 /* Co-processor instructions. */
13730 return thumb2_record_coproc_insn (thumb2_insn_r
);
13733 else if (op1
== 0x02)
13737 /* Branches and miscellaneous control instructions. */
13738 return thumb2_record_branch_misc_cntrl (thumb2_insn_r
);
13740 else if (op2
& 0x20)
13742 /* Data-processing (plain binary immediate) instruction. */
13743 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13747 /* Data-processing (modified immediate). */
13748 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
13751 else if (op1
== 0x03)
13753 if (!(op2
& 0x71 ))
13755 /* Store single data item. */
13756 return thumb2_record_str_single_data (thumb2_insn_r
);
13758 else if (!((op2
& 0x71) ^ 0x10))
13760 /* Advanced SIMD or structure load/store instructions. */
13761 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r
);
13763 else if (!((op2
& 0x67) ^ 0x01))
13765 /* Load byte, memory hints instruction. */
13766 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
13768 else if (!((op2
& 0x67) ^ 0x03))
13770 /* Load halfword, memory hints instruction. */
13771 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
13773 else if (!((op2
& 0x67) ^ 0x05))
13775 /* Load word instruction. */
13776 return thumb2_record_ld_word (thumb2_insn_r
);
13778 else if (!((op2
& 0x70) ^ 0x20))
13780 /* Data-processing (register) instruction. */
13781 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13783 else if (!((op2
& 0x78) ^ 0x30))
13785 /* Multiply, multiply accumulate, abs diff instruction. */
13786 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13788 else if (!((op2
& 0x78) ^ 0x38))
13790 /* Long multiply, long multiply accumulate, and divide. */
13791 return thumb2_record_lmul_lmla_div (thumb2_insn_r
);
13793 else if (op2
& 0x40)
13795 /* Co-processor instructions. */
13796 return thumb2_record_coproc_insn (thumb2_insn_r
);
13803 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13804 and positive val on fauilure. */
13807 extract_arm_insn (insn_decode_record
*insn_record
, uint32_t insn_size
)
13809 gdb_byte buf
[insn_size
];
13811 memset (&buf
[0], 0, insn_size
);
13813 if (target_read_memory (insn_record
->this_addr
, &buf
[0], insn_size
))
13815 insn_record
->arm_insn
= (uint32_t) extract_unsigned_integer (&buf
[0],
13817 gdbarch_byte_order_for_code (insn_record
->gdbarch
));
13821 typedef int (*sti_arm_hdl_fp_t
) (insn_decode_record
*);
13823 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13827 decode_insn (insn_decode_record
*arm_record
, record_type_t record_type
,
13828 uint32_t insn_size
)
13831 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13832 static const sti_arm_hdl_fp_t
const arm_handle_insn
[8] =
13834 arm_record_data_proc_misc_ld_str
, /* 000. */
13835 arm_record_data_proc_imm
, /* 001. */
13836 arm_record_ld_st_imm_offset
, /* 010. */
13837 arm_record_ld_st_reg_offset
, /* 011. */
13838 arm_record_ld_st_multiple
, /* 100. */
13839 arm_record_b_bl
, /* 101. */
13840 arm_record_asimd_vfp_coproc
, /* 110. */
13841 arm_record_coproc_data_proc
/* 111. */
13844 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13845 static const sti_arm_hdl_fp_t
const thumb_handle_insn
[8] =
13847 thumb_record_shift_add_sub
, /* 000. */
13848 thumb_record_add_sub_cmp_mov
, /* 001. */
13849 thumb_record_ld_st_reg_offset
, /* 010. */
13850 thumb_record_ld_st_imm_offset
, /* 011. */
13851 thumb_record_ld_st_stack
, /* 100. */
13852 thumb_record_misc
, /* 101. */
13853 thumb_record_ldm_stm_swi
, /* 110. */
13854 thumb_record_branch
/* 111. */
13857 uint32_t ret
= 0; /* return value: negative:failure 0:success. */
13858 uint32_t insn_id
= 0;
13860 if (extract_arm_insn (arm_record
, insn_size
))
13864 printf_unfiltered (_("Process record: error reading memory at "
13865 "addr %s len = %d.\n"),
13866 paddress (arm_record
->gdbarch
, arm_record
->this_addr
), insn_size
);
13870 else if (ARM_RECORD
== record_type
)
13872 arm_record
->cond
= bits (arm_record
->arm_insn
, 28, 31);
13873 insn_id
= bits (arm_record
->arm_insn
, 25, 27);
13874 ret
= arm_record_extension_space (arm_record
);
13875 /* If this insn has fallen into extension space
13876 then we need not decode it anymore. */
13877 if (ret
!= -1 && !INSN_RECORDED(arm_record
))
13879 ret
= arm_handle_insn
[insn_id
] (arm_record
);
13882 else if (THUMB_RECORD
== record_type
)
13884 /* As thumb does not have condition codes, we set negative. */
13885 arm_record
->cond
= -1;
13886 insn_id
= bits (arm_record
->arm_insn
, 13, 15);
13887 ret
= thumb_handle_insn
[insn_id
] (arm_record
);
13889 else if (THUMB2_RECORD
== record_type
)
13891 /* As thumb does not have condition codes, we set negative. */
13892 arm_record
->cond
= -1;
13894 /* Swap first half of 32bit thumb instruction with second half. */
13895 arm_record
->arm_insn
13896 = (arm_record
->arm_insn
>> 16) | (arm_record
->arm_insn
<< 16);
13898 insn_id
= thumb2_record_decode_insn_handler (arm_record
);
13900 if (insn_id
!= ARM_RECORD_SUCCESS
)
13902 arm_record_unsupported_insn (arm_record
);
13908 /* Throw assertion. */
13909 gdb_assert_not_reached ("not a valid instruction, could not decode");
13916 /* Cleans up local record registers and memory allocations. */
13919 deallocate_reg_mem (insn_decode_record
*record
)
13921 xfree (record
->arm_regs
);
13922 xfree (record
->arm_mems
);
13926 /* Parse the current instruction and record the values of the registers and
13927 memory that will be changed in current instruction to record_arch_list".
13928 Return -1 if something is wrong. */
13931 arm_process_record (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
13932 CORE_ADDR insn_addr
)
13935 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
13936 uint32_t no_of_rec
= 0;
13937 uint32_t ret
= 0; /* return value: -1:record failure ; 0:success */
13938 ULONGEST t_bit
= 0, insn_id
= 0;
13940 ULONGEST u_regval
= 0;
13942 insn_decode_record arm_record
;
13944 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13945 arm_record
.regcache
= regcache
;
13946 arm_record
.this_addr
= insn_addr
;
13947 arm_record
.gdbarch
= gdbarch
;
13950 if (record_debug
> 1)
13952 fprintf_unfiltered (gdb_stdlog
, "Process record: arm_process_record "
13954 paddress (gdbarch
, arm_record
.this_addr
));
13957 if (extract_arm_insn (&arm_record
, 2))
13961 printf_unfiltered (_("Process record: error reading memory at "
13962 "addr %s len = %d.\n"),
13963 paddress (arm_record
.gdbarch
,
13964 arm_record
.this_addr
), 2);
13969 /* Check the insn, whether it is thumb or arm one. */
13971 t_bit
= arm_psr_thumb_bit (arm_record
.gdbarch
);
13972 regcache_raw_read_unsigned (arm_record
.regcache
, ARM_PS_REGNUM
, &u_regval
);
13975 if (!(u_regval
& t_bit
))
13977 /* We are decoding arm insn. */
13978 ret
= decode_insn (&arm_record
, ARM_RECORD
, ARM_INSN_SIZE_BYTES
);
13982 insn_id
= bits (arm_record
.arm_insn
, 11, 15);
13983 /* is it thumb2 insn? */
13984 if ((0x1D == insn_id
) || (0x1E == insn_id
) || (0x1F == insn_id
))
13986 ret
= decode_insn (&arm_record
, THUMB2_RECORD
,
13987 THUMB2_INSN_SIZE_BYTES
);
13991 /* We are decoding thumb insn. */
13992 ret
= decode_insn (&arm_record
, THUMB_RECORD
, THUMB_INSN_SIZE_BYTES
);
13998 /* Record registers. */
13999 record_full_arch_list_add_reg (arm_record
.regcache
, ARM_PC_REGNUM
);
14000 if (arm_record
.arm_regs
)
14002 for (no_of_rec
= 0; no_of_rec
< arm_record
.reg_rec_count
; no_of_rec
++)
14004 if (record_full_arch_list_add_reg
14005 (arm_record
.regcache
, arm_record
.arm_regs
[no_of_rec
]))
14009 /* Record memories. */
14010 if (arm_record
.arm_mems
)
14012 for (no_of_rec
= 0; no_of_rec
< arm_record
.mem_rec_count
; no_of_rec
++)
14014 if (record_full_arch_list_add_mem
14015 ((CORE_ADDR
)arm_record
.arm_mems
[no_of_rec
].addr
,
14016 arm_record
.arm_mems
[no_of_rec
].len
))
14021 if (record_full_arch_list_add_end ())
14026 deallocate_reg_mem (&arm_record
);