]> git.ipfire.org Git - thirdparty/binutils-gdb.git/blob - gdb/arm-tdep.c
Share enum arm_breakpoint_kinds
[thirdparty/binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2016 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "regcache.h"
31 #include "reggroups.h"
32 #include "doublest.h"
33 #include "value.h"
34 #include "arch-utils.h"
35 #include "osabi.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
39 #include "objfiles.h"
40 #include "dwarf2-frame.h"
41 #include "gdbtypes.h"
42 #include "prologue-value.h"
43 #include "remote.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
46 #include "observer.h"
47
48 #include "arch/arm.h"
49 #include "arch/arm-get-next-pcs.h"
50 #include "arm-tdep.h"
51 #include "gdb/sim-arm.h"
52
53 #include "elf-bfd.h"
54 #include "coff/internal.h"
55 #include "elf/arm.h"
56
57 #include "vec.h"
58
59 #include "record.h"
60 #include "record-full.h"
61 #include <algorithm>
62
63 #include "features/arm/arm-with-m.c"
64 #include "features/arm/arm-with-m-fpa-layout.c"
65 #include "features/arm/arm-with-m-vfp-d16.c"
66 #include "features/arm/arm-with-iwmmxt.c"
67 #include "features/arm/arm-with-vfpv2.c"
68 #include "features/arm/arm-with-vfpv3.c"
69 #include "features/arm/arm-with-neon.c"
70
71 static int arm_debug;
72
73 /* Macros for setting and testing a bit in a minimal symbol that marks
74 it as Thumb function. The MSB of the minimal symbol's "info" field
75 is used for this purpose.
76
77 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
78 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
79
80 #define MSYMBOL_SET_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym) = 1
82
83 #define MSYMBOL_IS_SPECIAL(msym) \
84 MSYMBOL_TARGET_FLAG_1 (msym)
85
/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

/* One mapping symbol, stored as a section-relative address plus its
   type character.  arm_pc_is_thumb treats type 't' as Thumb code;
   presumably 'a' marks ARM code and 'd' data, per the ARM ELF
   mapping-symbol convention — confirm against the symbol reader.  */
struct arm_mapping_symbol
{
  /* Offset of the mapping symbol from the start of its section.  */
  bfd_vma value;
  /* Mapping-symbol type character (see above).  */
  char type;
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

/* Per-objfile attachment: one vector of mapping symbols per BFD
   section, indexed by the section's index (see the use in
   arm_find_mapping_symbol).  */
struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};
101
102 /* The list of available "set arm ..." and "show arm ..." commands. */
103 static struct cmd_list_element *setarmcmdlist = NULL;
104 static struct cmd_list_element *showarmcmdlist = NULL;
105
106 /* The type of floating-point to use. Keep this in sync with enum
107 arm_float_model, and the help string in _initialize_arm_tdep. */
108 static const char *const fp_model_strings[] =
109 {
110 "auto",
111 "softfpa",
112 "fpa",
113 "softvfp",
114 "vfp",
115 NULL
116 };
117
118 /* A variable that can be configured by the user. */
119 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
120 static const char *current_fp_model = "auto";
121
122 /* The ABI to use. Keep this in sync with arm_abi_kind. */
123 static const char *const arm_abi_strings[] =
124 {
125 "auto",
126 "APCS",
127 "AAPCS",
128 NULL
129 };
130
131 /* A variable that can be configured by the user. */
132 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
133 static const char *arm_abi_string = "auto";
134
135 /* The execution mode to assume. */
136 static const char *const arm_mode_strings[] =
137 {
138 "auto",
139 "arm",
140 "thumb",
141 NULL
142 };
143
144 static const char *arm_fallback_mode_string = "auto";
145 static const char *arm_force_mode_string = "auto";
146
147 /* Internal override of the execution mode. -1 means no override,
148 0 means override to ARM mode, 1 means override to Thumb mode.
149 The effect is the same as if arm_force_mode has been set by the
150 user (except the internal override has precedence over a user's
151 arm_force_mode override). */
152 static int arm_override_mode = -1;
153
154 /* Number of different reg name sets (options). */
155 static int num_disassembly_options;
156
157 /* The standard register names, and all the valid aliases for them. Note
158 that `fp', `sp' and `pc' are not added in this alias list, because they
159 have been added as builtin user registers in
160 std-regs.c:_initialize_frame_reg. */
161 static const struct
162 {
163 const char *name;
164 int regnum;
165 } arm_register_aliases[] = {
166 /* Basic register numbers. */
167 { "r0", 0 },
168 { "r1", 1 },
169 { "r2", 2 },
170 { "r3", 3 },
171 { "r4", 4 },
172 { "r5", 5 },
173 { "r6", 6 },
174 { "r7", 7 },
175 { "r8", 8 },
176 { "r9", 9 },
177 { "r10", 10 },
178 { "r11", 11 },
179 { "r12", 12 },
180 { "r13", 13 },
181 { "r14", 14 },
182 { "r15", 15 },
183 /* Synonyms (argument and variable registers). */
184 { "a1", 0 },
185 { "a2", 1 },
186 { "a3", 2 },
187 { "a4", 3 },
188 { "v1", 4 },
189 { "v2", 5 },
190 { "v3", 6 },
191 { "v4", 7 },
192 { "v5", 8 },
193 { "v6", 9 },
194 { "v7", 10 },
195 { "v8", 11 },
196 /* Other platform-specific names for r9. */
197 { "sb", 9 },
198 { "tr", 9 },
199 /* Special names. */
200 { "ip", 12 },
201 { "lr", 14 },
202 /* Names used by GCC (not listed in the ARM EABI). */
203 { "sl", 10 },
204 /* A special name from the older ATPCS. */
205 { "wr", 7 },
206 };
207
208 static const char *const arm_register_names[] =
209 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
210 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
211 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
212 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
213 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
214 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
215 "fps", "cpsr" }; /* 24 25 */
216
217 /* Valid register name styles. */
218 static const char **valid_disassembly_styles;
219
220 /* Disassembly style to use. Default to "std" register names. */
221 static const char *disassembly_style;
222
223 /* This is used to keep the bfd arch_info in sync with the disassembly
224 style. */
225 static void set_disassembly_style_sfunc(char *, int,
226 struct cmd_list_element *);
227 static void set_disassembly_style (void);
228
229 static void convert_from_extended (const struct floatformat *, const void *,
230 void *, int);
231 static void convert_to_extended (const struct floatformat *, void *,
232 const void *, int);
233
234 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
235 struct regcache *regcache,
236 int regnum, gdb_byte *buf);
237 static void arm_neon_quad_write (struct gdbarch *gdbarch,
238 struct regcache *regcache,
239 int regnum, const gdb_byte *buf);
240
static CORE_ADDR
arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);


/* get_next_pcs operations.  Callback table handed to the shared
   arm_get_next_pcs code (arch/arm-get-next-pcs.h) so it can read
   target memory and query mode/addressing details.  */
static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
  arm_get_next_pcs_read_memory_unsigned_integer,
  arm_get_next_pcs_syscall_next_pc,
  arm_get_next_pcs_addr_bits_remove,
  arm_get_next_pcs_is_thumb,
  NULL,  /* NOTE(review): final member deliberately unset here —
	    confirm its role against struct arm_get_next_pcs_ops.  */
};
253
/* Cached results of scanning a function prologue, filled in by
   arm_analyze_prologue / thumb_analyze_prologue and consumed by the
   prologue-based frame unwinder.  */
struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};
273
274 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
275 CORE_ADDR prologue_start,
276 CORE_ADDR prologue_end,
277 struct arm_prologue_cache *cache);
278
279 /* Architecture version for displaced stepping. This effects the behaviour of
280 certain instructions, and really should not be hard-wired. */
281
282 #define DISPLACED_STEPPING_ARCH_VERSION 5
283
284 /* Set to true if the 32-bit mode is in use. */
285
286 int arm_apcs_32 = 1;
287
288 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
289
290 int
291 arm_psr_thumb_bit (struct gdbarch *gdbarch)
292 {
293 if (gdbarch_tdep (gdbarch)->is_m)
294 return XPSR_T;
295 else
296 return CPSR_T;
297 }
298
299 /* Determine if the processor is currently executing in Thumb mode. */
300
301 int
302 arm_is_thumb (struct regcache *regcache)
303 {
304 ULONGEST cpsr;
305 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));
306
307 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
308
309 return (cpsr & t_bit) != 0;
310 }
311
312 /* Determine if FRAME is executing in Thumb mode. */
313
314 int
315 arm_frame_is_thumb (struct frame_info *frame)
316 {
317 CORE_ADDR cpsr;
318 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
319
320 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
321 directly (from a signal frame or dummy frame) or by interpreting
322 the saved LR (from a prologue or DWARF frame). So consult it and
323 trust the unwinders. */
324 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
325
326 return (cpsr & t_bit) != 0;
327 }
328
329 /* Callback for VEC_lower_bound. */
330
331 static inline int
332 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
333 const struct arm_mapping_symbol *rhs)
334 {
335 return lhs->value < rhs->value;
336 }
337
/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the (absolute) location of the mapping symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      /* Mapping symbols are stored section-relative, so rebase the
	 lookup key before searching.  */
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
					    0 };
      unsigned int idx;

      data = (struct arm_per_objfile *) objfile_data (sec->objfile,
						      arm_objfile_data_key);
      if (data != NULL)
	{
	  /* One sorted vector per BFD section; see arm_per_objfile.  */
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	    {
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		    {
		      if (start)
			*start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		  if (start)
		    *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;
		}
	    }
	}
    }

  /* No section, no per-objfile data, or MEMADDR precedes the first
     mapping symbol of its section.  */
  return 0;
}
397
/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.  Returns
   nonzero for Thumb, zero for ARM.  The heuristics below are tried in
   strict priority order; do not reorder them.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  struct displaced_step_closure* dsc
    = get_displaced_step_closure_by_addr(memaddr);

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* Respect internal mode override if active (takes precedence over
     the user's "set arm force-mode", see arm_override_mode).  */
  if (arm_override_mode != -1)
    return arm_override_mode;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}
467
468 /* Determine if the address specified equals any of these magic return
469 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
470 architectures.
471
472 From ARMv6-M Reference Manual B1.5.8
473 Table B1-5 Exception return behavior
474
475 EXC_RETURN Return To Return Stack
476 0xFFFFFFF1 Handler mode Main
477 0xFFFFFFF9 Thread mode Main
478 0xFFFFFFFD Thread mode Process
479
480 From ARMv7-M Reference Manual B1.5.8
481 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
482
483 EXC_RETURN Return To Return Stack
484 0xFFFFFFF1 Handler mode Main
485 0xFFFFFFF9 Thread mode Main
486 0xFFFFFFFD Thread mode Process
487
488 Table B1-9 EXC_RETURN definition of exception return behavior, with
489 FP
490
491 EXC_RETURN Return To Return Stack Frame Type
492 0xFFFFFFE1 Handler mode Main Extended
493 0xFFFFFFE9 Thread mode Main Extended
494 0xFFFFFFED Thread mode Process Extended
495 0xFFFFFFF1 Handler mode Main Basic
496 0xFFFFFFF9 Thread mode Main Basic
497 0xFFFFFFFD Thread mode Process Basic
498
499 For more details see "B1.5.8 Exception return behavior"
500 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
501
502 static int
503 arm_m_addr_is_magic (CORE_ADDR addr)
504 {
505 switch (addr)
506 {
507 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
508 the exception return behavior. */
509 case 0xffffffe1:
510 case 0xffffffe9:
511 case 0xffffffed:
512 case 0xfffffff1:
513 case 0xfffffff9:
514 case 0xfffffffd:
515 /* Address is magic. */
516 return 1;
517
518 default:
519 /* Address is not magic. */
520 return 0;
521 }
522 }
523
524 /* Remove useless bits from addresses in a running program. */
525 static CORE_ADDR
526 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
527 {
528 /* On M-profile devices, do not strip the low bit from EXC_RETURN
529 (the magic exception return address). */
530 if (gdbarch_tdep (gdbarch)->is_m
531 && arm_m_addr_is_magic (val))
532 return val;
533
534 if (arm_apcs_32)
535 return UNMAKE_THUMB_ADDR (val);
536 else
537 return (val & 0x03fffffc);
538 }
539
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  Skip the stub's leading "__" so that the
	 stub for a helper whose own name starts with "__" (e.g.
	 "____truncdfsf2_from_thumb" for "__truncdfsf2") matches the
	 prefix tests below.  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  return 0;
}
593
/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  The result is the 16-bit value imm4:i:imm3:imm8
   reassembled from the scattered fields.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  The result is the 16-bit value
   imm4:imm12.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
608
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.
   IMM is the 12-bit modified-immediate field i:imm3:imm8; the result
   is the expanded 32-bit constant.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int rotation = imm >> 7;
  unsigned int byte = imm & 0xff;

  if (rotation >= 8)
    /* Rotated constant: an implicit leading one above imm<6:0>,
       rotated right by ROTATION.  */
    return (0x80 | (imm & 0x7f)) << (32 - rotation);

  /* Replicated-byte forms, selected by imm<11:10>.  */
  switch (rotation / 2)
    {
    case 0:			/* 0x000000XY */
      return byte;
    case 1:			/* 0x00XY00XY */
      return byte | (byte << 16);
    case 2:			/* 0xXY00XY00 */
      return (byte << 8) | (byte << 24);
    default:			/* 0xXYXYXYXY */
      return byte | (byte << 8) | (byte << 16) | (byte << 24);
    }
}
632
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  if (insn == 0x46bd)			/* mov sp, r7 */
    return 1;
  if ((insn & 0xff80) == 0xb000)	/* add sp, imm */
    return 1;
  if ((insn & 0xfe00) == 0xbc00)	/* pop <registers> */
    return 1;
  return 0;
}
643
644 /* Analyze a Thumb prologue, looking for a recognizable stack frame
645 and frame pointer. Scan until we encounter a store that could
646 clobber the stack frame unexpectedly, or an unknown instruction.
647 Return the last address which is definitely safe to skip for an
648 initial breakpoint. */
649
650 static CORE_ADDR
651 thumb_analyze_prologue (struct gdbarch *gdbarch,
652 CORE_ADDR start, CORE_ADDR limit,
653 struct arm_prologue_cache *cache)
654 {
655 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
656 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
657 int i;
658 pv_t regs[16];
659 struct pv_area *stack;
660 struct cleanup *back_to;
661 CORE_ADDR offset;
662 CORE_ADDR unrecognized_pc = 0;
663
664 for (i = 0; i < 16; i++)
665 regs[i] = pv_register (i, 0);
666 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
667 back_to = make_cleanup_free_pv_area (stack);
668
669 while (start < limit)
670 {
671 unsigned short insn;
672
673 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
674
675 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
676 {
677 int regno;
678 int mask;
679
680 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
681 break;
682
683 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
684 whether to save LR (R14). */
685 mask = (insn & 0xff) | ((insn & 0x100) << 6);
686
687 /* Calculate offsets of saved R0-R7 and LR. */
688 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
689 if (mask & (1 << regno))
690 {
691 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
692 -4);
693 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
694 }
695 }
696 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
697 {
698 offset = (insn & 0x7f) << 2; /* get scaled offset */
699 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
700 -offset);
701 }
702 else if (thumb_instruction_restores_sp (insn))
703 {
704 /* Don't scan past the epilogue. */
705 break;
706 }
707 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
708 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
709 (insn & 0xff) << 2);
710 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
711 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
712 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
713 bits (insn, 6, 8));
714 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
715 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
716 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
717 bits (insn, 0, 7));
718 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
719 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
720 && pv_is_constant (regs[bits (insn, 3, 5)]))
721 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
722 regs[bits (insn, 6, 8)]);
723 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
724 && pv_is_constant (regs[bits (insn, 3, 6)]))
725 {
726 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
727 int rm = bits (insn, 3, 6);
728 regs[rd] = pv_add (regs[rd], regs[rm]);
729 }
730 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
731 {
732 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
733 int src_reg = (insn & 0x78) >> 3;
734 regs[dst_reg] = regs[src_reg];
735 }
736 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
737 {
738 /* Handle stores to the stack. Normally pushes are used,
739 but with GCC -mtpcs-frame, there may be other stores
740 in the prologue to create the frame. */
741 int regno = (insn >> 8) & 0x7;
742 pv_t addr;
743
744 offset = (insn & 0xff) << 2;
745 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
746
747 if (pv_area_store_would_trash (stack, addr))
748 break;
749
750 pv_area_store (stack, addr, 4, regs[regno]);
751 }
752 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
753 {
754 int rd = bits (insn, 0, 2);
755 int rn = bits (insn, 3, 5);
756 pv_t addr;
757
758 offset = bits (insn, 6, 10) << 2;
759 addr = pv_add_constant (regs[rn], offset);
760
761 if (pv_area_store_would_trash (stack, addr))
762 break;
763
764 pv_area_store (stack, addr, 4, regs[rd]);
765 }
766 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
767 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
768 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
769 /* Ignore stores of argument registers to the stack. */
770 ;
771 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
772 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
773 /* Ignore block loads from the stack, potentially copying
774 parameters from memory. */
775 ;
776 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
777 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
778 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
779 /* Similarly ignore single loads from the stack. */
780 ;
781 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
782 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
783 /* Skip register copies, i.e. saves to another register
784 instead of the stack. */
785 ;
786 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
787 /* Recognize constant loads; even with small stacks these are necessary
788 on Thumb. */
789 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
790 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
791 {
792 /* Constant pool loads, for the same reason. */
793 unsigned int constant;
794 CORE_ADDR loc;
795
796 loc = start + 4 + bits (insn, 0, 7) * 4;
797 constant = read_memory_unsigned_integer (loc, 4, byte_order);
798 regs[bits (insn, 8, 10)] = pv_constant (constant);
799 }
800 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
801 {
802 unsigned short inst2;
803
804 inst2 = read_memory_unsigned_integer (start + 2, 2,
805 byte_order_for_code);
806
807 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
808 {
809 /* BL, BLX. Allow some special function calls when
810 skipping the prologue; GCC generates these before
811 storing arguments to the stack. */
812 CORE_ADDR nextpc;
813 int j1, j2, imm1, imm2;
814
815 imm1 = sbits (insn, 0, 10);
816 imm2 = bits (inst2, 0, 10);
817 j1 = bit (inst2, 13);
818 j2 = bit (inst2, 11);
819
820 offset = ((imm1 << 12) + (imm2 << 1));
821 offset ^= ((!j2) << 22) | ((!j1) << 23);
822
823 nextpc = start + 4 + offset;
824 /* For BLX make sure to clear the low bits. */
825 if (bit (inst2, 12) == 0)
826 nextpc = nextpc & 0xfffffffc;
827
828 if (!skip_prologue_function (gdbarch, nextpc,
829 bit (inst2, 12) != 0))
830 break;
831 }
832
833 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
834 { registers } */
835 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
836 {
837 pv_t addr = regs[bits (insn, 0, 3)];
838 int regno;
839
840 if (pv_area_store_would_trash (stack, addr))
841 break;
842
843 /* Calculate offsets of saved registers. */
844 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
845 if (inst2 & (1 << regno))
846 {
847 addr = pv_add_constant (addr, -4);
848 pv_area_store (stack, addr, 4, regs[regno]);
849 }
850
851 if (insn & 0x0020)
852 regs[bits (insn, 0, 3)] = addr;
853 }
854
855 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
856 [Rn, #+/-imm]{!} */
857 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
858 {
859 int regno1 = bits (inst2, 12, 15);
860 int regno2 = bits (inst2, 8, 11);
861 pv_t addr = regs[bits (insn, 0, 3)];
862
863 offset = inst2 & 0xff;
864 if (insn & 0x0080)
865 addr = pv_add_constant (addr, offset);
866 else
867 addr = pv_add_constant (addr, -offset);
868
869 if (pv_area_store_would_trash (stack, addr))
870 break;
871
872 pv_area_store (stack, addr, 4, regs[regno1]);
873 pv_area_store (stack, pv_add_constant (addr, 4),
874 4, regs[regno2]);
875
876 if (insn & 0x0020)
877 regs[bits (insn, 0, 3)] = addr;
878 }
879
880 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
881 && (inst2 & 0x0c00) == 0x0c00
882 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
883 {
884 int regno = bits (inst2, 12, 15);
885 pv_t addr = regs[bits (insn, 0, 3)];
886
887 offset = inst2 & 0xff;
888 if (inst2 & 0x0200)
889 addr = pv_add_constant (addr, offset);
890 else
891 addr = pv_add_constant (addr, -offset);
892
893 if (pv_area_store_would_trash (stack, addr))
894 break;
895
896 pv_area_store (stack, addr, 4, regs[regno]);
897
898 if (inst2 & 0x0100)
899 regs[bits (insn, 0, 3)] = addr;
900 }
901
902 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
903 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
904 {
905 int regno = bits (inst2, 12, 15);
906 pv_t addr;
907
908 offset = inst2 & 0xfff;
909 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
910
911 if (pv_area_store_would_trash (stack, addr))
912 break;
913
914 pv_area_store (stack, addr, 4, regs[regno]);
915 }
916
917 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
918 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
919 /* Ignore stores of argument registers to the stack. */
920 ;
921
922 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
923 && (inst2 & 0x0d00) == 0x0c00
924 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
925 /* Ignore stores of argument registers to the stack. */
926 ;
927
928 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
929 { registers } */
930 && (inst2 & 0x8000) == 0x0000
931 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
932 /* Ignore block loads from the stack, potentially copying
933 parameters from memory. */
934 ;
935
936 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
937 [Rn, #+/-imm] */
938 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
939 /* Similarly ignore dual loads from the stack. */
940 ;
941
942 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
943 && (inst2 & 0x0d00) == 0x0c00
944 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
945 /* Similarly ignore single loads from the stack. */
946 ;
947
948 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
949 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
950 /* Similarly ignore single loads from the stack. */
951 ;
952
953 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
954 && (inst2 & 0x8000) == 0x0000)
955 {
956 unsigned int imm = ((bits (insn, 10, 10) << 11)
957 | (bits (inst2, 12, 14) << 8)
958 | bits (inst2, 0, 7));
959
960 regs[bits (inst2, 8, 11)]
961 = pv_add_constant (regs[bits (insn, 0, 3)],
962 thumb_expand_immediate (imm));
963 }
964
965 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
966 && (inst2 & 0x8000) == 0x0000)
967 {
968 unsigned int imm = ((bits (insn, 10, 10) << 11)
969 | (bits (inst2, 12, 14) << 8)
970 | bits (inst2, 0, 7));
971
972 regs[bits (inst2, 8, 11)]
973 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
974 }
975
976 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
977 && (inst2 & 0x8000) == 0x0000)
978 {
979 unsigned int imm = ((bits (insn, 10, 10) << 11)
980 | (bits (inst2, 12, 14) << 8)
981 | bits (inst2, 0, 7));
982
983 regs[bits (inst2, 8, 11)]
984 = pv_add_constant (regs[bits (insn, 0, 3)],
985 - (CORE_ADDR) thumb_expand_immediate (imm));
986 }
987
988 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
989 && (inst2 & 0x8000) == 0x0000)
990 {
991 unsigned int imm = ((bits (insn, 10, 10) << 11)
992 | (bits (inst2, 12, 14) << 8)
993 | bits (inst2, 0, 7));
994
995 regs[bits (inst2, 8, 11)]
996 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
997 }
998
999 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1000 {
1001 unsigned int imm = ((bits (insn, 10, 10) << 11)
1002 | (bits (inst2, 12, 14) << 8)
1003 | bits (inst2, 0, 7));
1004
1005 regs[bits (inst2, 8, 11)]
1006 = pv_constant (thumb_expand_immediate (imm));
1007 }
1008
1009 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1010 {
1011 unsigned int imm
1012 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1013
1014 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1015 }
1016
1017 else if (insn == 0xea5f /* mov.w Rd,Rm */
1018 && (inst2 & 0xf0f0) == 0)
1019 {
1020 int dst_reg = (inst2 & 0x0f00) >> 8;
1021 int src_reg = inst2 & 0xf;
1022 regs[dst_reg] = regs[src_reg];
1023 }
1024
1025 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1026 {
1027 /* Constant pool loads. */
1028 unsigned int constant;
1029 CORE_ADDR loc;
1030
1031 offset = bits (inst2, 0, 11);
1032 if (insn & 0x0080)
1033 loc = start + 4 + offset;
1034 else
1035 loc = start + 4 - offset;
1036
1037 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1038 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1039 }
1040
1041 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1042 {
1043 /* Constant pool loads. */
1044 unsigned int constant;
1045 CORE_ADDR loc;
1046
1047 offset = bits (inst2, 0, 7) << 2;
1048 if (insn & 0x0080)
1049 loc = start + 4 + offset;
1050 else
1051 loc = start + 4 - offset;
1052
1053 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1054 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1055
1056 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1057 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1058 }
1059
1060 else if (thumb2_instruction_changes_pc (insn, inst2))
1061 {
1062 /* Don't scan past anything that might change control flow. */
1063 break;
1064 }
1065 else
1066 {
1067 /* The optimizer might shove anything into the prologue,
1068 so we just skip what we don't recognize. */
1069 unrecognized_pc = start;
1070 }
1071
1072 start += 2;
1073 }
1074 else if (thumb_instruction_changes_pc (insn))
1075 {
1076 /* Don't scan past anything that might change control flow. */
1077 break;
1078 }
1079 else
1080 {
1081 /* The optimizer might shove anything into the prologue,
1082 so we just skip what we don't recognize. */
1083 unrecognized_pc = start;
1084 }
1085
1086 start += 2;
1087 }
1088
1089 if (arm_debug)
1090 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1091 paddress (gdbarch, start));
1092
1093 if (unrecognized_pc == 0)
1094 unrecognized_pc = start;
1095
1096 if (cache == NULL)
1097 {
1098 do_cleanups (back_to);
1099 return unrecognized_pc;
1100 }
1101
1102 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1103 {
1104 /* Frame pointer is fp. Frame size is constant. */
1105 cache->framereg = ARM_FP_REGNUM;
1106 cache->framesize = -regs[ARM_FP_REGNUM].k;
1107 }
1108 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1109 {
1110 /* Frame pointer is r7. Frame size is constant. */
1111 cache->framereg = THUMB_FP_REGNUM;
1112 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1113 }
1114 else
1115 {
1116 /* Try the stack pointer... this is a bit desperate. */
1117 cache->framereg = ARM_SP_REGNUM;
1118 cache->framesize = -regs[ARM_SP_REGNUM].k;
1119 }
1120
1121 for (i = 0; i < 16; i++)
1122 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1123 cache->saved_regs[i].addr = offset;
1124
1125 do_cleanups (back_to);
1126 return unrecognized_pc;
1127 }
1128
1129
1130 /* Try to analyze the instructions starting from PC, which load symbol
1131 __stack_chk_guard. Return the address of instruction after loading this
1132 symbol, set the dest register number to *BASEREG, and set the size of
1133 instructions for loading symbol in OFFSET. Return 0 if instructions are
1134 not recognized. */
1135
static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
                                 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  /* ADDRESS stays 0 (the "not recognized" return value) unless one of
     the known load sequences below matches.  */
  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
        = read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
        {
          *destreg = bits (insn1, 8, 10);
          *offset = 2;
          /* PC-relative load: base is the word-aligned PC plus 4, the
             8-bit immediate is in words.  The literal pool slot holds
             the address of the symbol, so dereference it.  */
          address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
          address = read_memory_unsigned_integer (address, 4,
                                                  byte_order_for_code);
        }
      else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
        {
          unsigned short insn2
            = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

          low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

          /* Read the next 32-bit instruction, expected to be the
             matching movt carrying the upper half of the address.  */
          insn1
            = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
          insn2
            = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

          /* movt Rd, #const */
          if ((insn1 & 0xfbc0) == 0xf2c0)
            {
              high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
              *destreg = bits (insn2, 8, 11);
              *offset = 8;
              address = (high << 16 | low);
            }
        }
    }
  else
    {
      unsigned int insn
        = read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
        {
          /* ARM-mode PC reads as the instruction address + 8; the
             literal pool slot holds the symbol address.  */
          address = bits (insn, 0, 11) + pc + 8;
          address = read_memory_unsigned_integer (address, 4,
                                                  byte_order_for_code);

          *destreg = bits (insn, 12, 15);
          *offset = 4;
        }
      else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
        {
          low = EXTRACT_MOVW_MOVT_IMM_A (insn);

          insn
            = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

          if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
            {
              high = EXTRACT_MOVW_MOVT_IMM_A (insn);
              *destreg = bits (insn, 12, 15);
              *offset = 8;
              address = (high << 16 | low);
            }
        }
    }

  return address;
}
1213
1214 /* Try to skip a sequence of instructions used for stack protector. If PC
1215 points to the first instruction of this sequence, return the address of
1216 first instruction after this sequence, otherwise, return original PC.
1217
1218 On arm, this sequence of instructions is composed of mainly three steps,
1219 Step 1: load symbol __stack_chk_guard,
1220 Step 2: load from address of __stack_chk_guard,
1221 Step 3: store it to somewhere else.
1222
1223 Usually, instructions on step 2 and step 3 are the same on various ARM
1224 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1225 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1226 instructions in step 1 vary from different ARM architectures. On ARMv7,
1227 they are,
1228
1229 movw Rn, #:lower16:__stack_chk_guard
1230 movt Rn, #:upper16:__stack_chk_guard
1231
1232 On ARMv5t, it is,
1233
1234 ldr Rn, .Label
1235 ....
   .Label:
1237 .word __stack_chk_guard
1238
1239 Since ldr/str is a very popular instruction, we can't use them as
1240 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1241 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */
1243
static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  On success ADDR is the
     loaded address, BASEREG the register it was loaded into, and
     OFFSET the byte size of the load sequence.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
                                           &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
        = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  The base register
         must be the one Step 1 loaded the guard address into.  */
      if ((insn & 0xf800) != 0x6800)
        return pc;
      if (bits (insn, 3, 5) != basereg)
        return pc;
      destreg = bits (insn, 0, 2);

      insn = read_memory_unsigned_integer (pc + offset + 2, 2,
                                           byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  The stored register
         must be the one Step 2 loaded the guard value into.  */
      if ((insn & 0xf800) != 0x6000)
        return pc;
      if (destreg != bits (insn, 0, 2))
        return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
        = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
        return pc;
      if (bits (insn, 16, 19) != basereg)
        return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_memory_unsigned_integer (pc + offset + 4,
                                           4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
        return pc;
      if (bits (insn, 12, 15) != destreg)
        return pc;
    }
  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
1315
1316 /* Advance the PC across any function entry prologue instructions to
1317 reach some "real" code.
1318
1319 The APCS (ARM Procedure Call Standard) defines the following
1320 prologue:
1321
1322 mov ip, sp
1323 [stmfd sp!, {a1,a2,a3,a4}]
1324 stmfd sp!, {...,fp,ip,lr,pc}
1325 [stfe f7, [sp, #-12]!]
1326 [stfe f6, [sp, #-12]!]
1327 [stfe f5, [sp, #-12]!]
1328 [stfe f4, [sp, #-12]!]
1329 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1330
1331 static CORE_ADDR
1332 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1333 {
1334 CORE_ADDR func_addr, limit_pc;
1335
1336 /* See if we can determine the end of the prologue via the symbol table.
1337 If so, then return either PC, or the PC after the prologue, whichever
1338 is greater. */
1339 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1340 {
1341 CORE_ADDR post_prologue_pc
1342 = skip_prologue_using_sal (gdbarch, func_addr);
1343 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1344
1345 if (post_prologue_pc)
1346 post_prologue_pc
1347 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1348
1349
1350 /* GCC always emits a line note before the prologue and another
1351 one after, even if the two are at the same address or on the
1352 same line. Take advantage of this so that we do not need to
1353 know every instruction that might appear in the prologue. We
1354 will have producer information for most binaries; if it is
1355 missing (e.g. for -gstabs), assuming the GNU tools. */
1356 if (post_prologue_pc
1357 && (cust == NULL
1358 || COMPUNIT_PRODUCER (cust) == NULL
1359 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1360 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1361 return post_prologue_pc;
1362
1363 if (post_prologue_pc != 0)
1364 {
1365 CORE_ADDR analyzed_limit;
1366
1367 /* For non-GCC compilers, make sure the entire line is an
1368 acceptable prologue; GDB will round this function's
1369 return value up to the end of the following line so we
1370 can not skip just part of a line (and we do not want to).
1371
1372 RealView does not treat the prologue specially, but does
1373 associate prologue code with the opening brace; so this
1374 lets us skip the first line if we think it is the opening
1375 brace. */
1376 if (arm_pc_is_thumb (gdbarch, func_addr))
1377 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1378 post_prologue_pc, NULL);
1379 else
1380 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1381 post_prologue_pc, NULL);
1382
1383 if (analyzed_limit != post_prologue_pc)
1384 return func_addr;
1385
1386 return post_prologue_pc;
1387 }
1388 }
1389
1390 /* Can't determine prologue from the symbol table, need to examine
1391 instructions. */
1392
1393 /* Find an upper limit on the function prologue using the debug
1394 information. If the debug information could not be used to provide
1395 that bound, then use an arbitrary large number as the upper bound. */
1396 /* Like arm_scan_prologue, stop no later than pc + 64. */
1397 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1398 if (limit_pc == 0)
1399 limit_pc = pc + 64; /* Magic. */
1400
1401
1402 /* Check if this is Thumb code. */
1403 if (arm_pc_is_thumb (gdbarch, pc))
1404 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1405 else
1406 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1407 }
1408
1409 /* *INDENT-OFF* */
1410 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1411 This function decodes a Thumb function prologue to determine:
1412 1) the size of the stack frame
1413 2) which registers are saved on it
1414 3) the offsets of saved regs
1415 4) the offset from the stack pointer to the frame pointer
1416
1417 A typical Thumb function prologue would create this stack frame
1418 (offsets relative to FP)
1419 old SP -> 24 stack parameters
1420 20 LR
1421 16 R7
1422 R7 -> 0 local variables (16 bytes)
1423 SP -> -12 additional stack space (12 bytes)
1424 The frame size would thus be 36 bytes, and the frame offset would be
1425 12 bytes. The frame register is R7.
1426
   The comments for thumb_skip_prolog() describe the algorithm we use
   to detect the end of the prologue.  */
1429 /* *INDENT-ON* */
1430
1431 static void
1432 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1433 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1434 {
1435 CORE_ADDR prologue_start;
1436 CORE_ADDR prologue_end;
1437
1438 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1439 &prologue_end))
1440 {
1441 /* See comment in arm_scan_prologue for an explanation of
1442 this heuristics. */
1443 if (prologue_end > prologue_start + 64)
1444 {
1445 prologue_end = prologue_start + 64;
1446 }
1447 }
1448 else
1449 /* We're in the boondocks: we have no idea where the start of the
1450 function is. */
1451 return;
1452
1453 prologue_end = std::min (prologue_end, prev_pc);
1454
1455 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1456 }
1457
/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
   otherwise.  */

static int
arm_instruction_restores_sp (unsigned int insn)
{
  /* Instructions with condition field 0b1111 (NV / unconditional
     space) never take the SP-restoring forms checked below.  */
  if (((insn >> 28) & 0xf) == 0xf)
    return 0;

  if ((insn & 0x0df0f000) == 0x0080d000)
    return 1;		/* ADD SP (register or immediate).  */
  if ((insn & 0x0df0f000) == 0x0040d000)
    return 1;		/* SUB SP (register or immediate).  */
  if ((insn & 0x0ffffff0) == 0x01a0d000)
    return 1;		/* MOV SP, Rm.  */
  if ((insn & 0x0fff0000) == 0x08bd0000)
    return 1;		/* POP (LDMIA sp!, {...}).  */
  if ((insn & 0x0fff0000) == 0x049d0000)
    return 1;		/* POP of a single register.  */

  return 0;
}
1481
1482 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1483 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1484 fill it in. Return the first address not recognized as a prologue
1485 instruction.
1486
1487 We recognize all the instructions typically found in ARM prologues,
1488 plus harmless instructions which can be skipped (either for analysis
1489 purposes, or a more restrictive set that can be skipped when finding
1490 the end of the prologue). */
1491
1492 static CORE_ADDR
1493 arm_analyze_prologue (struct gdbarch *gdbarch,
1494 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1495 struct arm_prologue_cache *cache)
1496 {
1497 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1498 int regno;
1499 CORE_ADDR offset, current_pc;
1500 pv_t regs[ARM_FPS_REGNUM];
1501 struct pv_area *stack;
1502 struct cleanup *back_to;
1503 CORE_ADDR unrecognized_pc = 0;
1504
1505 /* Search the prologue looking for instructions that set up the
1506 frame pointer, adjust the stack pointer, and save registers.
1507
1508 Be careful, however, and if it doesn't look like a prologue,
1509 don't try to scan it. If, for instance, a frameless function
1510 begins with stmfd sp!, then we will tell ourselves there is
1511 a frame, which will confuse stack traceback, as well as "finish"
1512 and other operations that rely on a knowledge of the stack
1513 traceback. */
1514
1515 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1516 regs[regno] = pv_register (regno, 0);
1517 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1518 back_to = make_cleanup_free_pv_area (stack);
1519
1520 for (current_pc = prologue_start;
1521 current_pc < prologue_end;
1522 current_pc += 4)
1523 {
1524 unsigned int insn
1525 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1526
1527 if (insn == 0xe1a0c00d) /* mov ip, sp */
1528 {
1529 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1530 continue;
1531 }
1532 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1533 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1534 {
1535 unsigned imm = insn & 0xff; /* immediate value */
1536 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1537 int rd = bits (insn, 12, 15);
1538 imm = (imm >> rot) | (imm << (32 - rot));
1539 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1540 continue;
1541 }
1542 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1543 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1544 {
1545 unsigned imm = insn & 0xff; /* immediate value */
1546 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1547 int rd = bits (insn, 12, 15);
1548 imm = (imm >> rot) | (imm << (32 - rot));
1549 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1550 continue;
1551 }
1552 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1553 [sp, #-4]! */
1554 {
1555 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1556 break;
1557 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1558 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1559 regs[bits (insn, 12, 15)]);
1560 continue;
1561 }
1562 else if ((insn & 0xffff0000) == 0xe92d0000)
1563 /* stmfd sp!, {..., fp, ip, lr, pc}
1564 or
1565 stmfd sp!, {a1, a2, a3, a4} */
1566 {
1567 int mask = insn & 0xffff;
1568
1569 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1570 break;
1571
1572 /* Calculate offsets of saved registers. */
1573 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1574 if (mask & (1 << regno))
1575 {
1576 regs[ARM_SP_REGNUM]
1577 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1578 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1579 }
1580 }
1581 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1582 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1583 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1584 {
1585 /* No need to add this to saved_regs -- it's just an arg reg. */
1586 continue;
1587 }
1588 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1589 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1590 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1591 {
1592 /* No need to add this to saved_regs -- it's just an arg reg. */
1593 continue;
1594 }
1595 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1596 { registers } */
1597 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1598 {
1599 /* No need to add this to saved_regs -- it's just arg regs. */
1600 continue;
1601 }
1602 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1603 {
1604 unsigned imm = insn & 0xff; /* immediate value */
1605 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1606 imm = (imm >> rot) | (imm << (32 - rot));
1607 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1608 }
1609 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1610 {
1611 unsigned imm = insn & 0xff; /* immediate value */
1612 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1613 imm = (imm >> rot) | (imm << (32 - rot));
1614 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1615 }
1616 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1617 [sp, -#c]! */
1618 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1619 {
1620 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1621 break;
1622
1623 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1624 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1625 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1626 }
1627 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1628 [sp!] */
1629 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1630 {
1631 int n_saved_fp_regs;
1632 unsigned int fp_start_reg, fp_bound_reg;
1633
1634 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1635 break;
1636
1637 if ((insn & 0x800) == 0x800) /* N0 is set */
1638 {
1639 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1640 n_saved_fp_regs = 3;
1641 else
1642 n_saved_fp_regs = 1;
1643 }
1644 else
1645 {
1646 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1647 n_saved_fp_regs = 2;
1648 else
1649 n_saved_fp_regs = 4;
1650 }
1651
1652 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1653 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1654 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1655 {
1656 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1657 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1658 regs[fp_start_reg++]);
1659 }
1660 }
1661 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1662 {
1663 /* Allow some special function calls when skipping the
1664 prologue; GCC generates these before storing arguments to
1665 the stack. */
1666 CORE_ADDR dest = BranchDest (current_pc, insn);
1667
1668 if (skip_prologue_function (gdbarch, dest, 0))
1669 continue;
1670 else
1671 break;
1672 }
1673 else if ((insn & 0xf0000000) != 0xe0000000)
1674 break; /* Condition not true, exit early. */
1675 else if (arm_instruction_changes_pc (insn))
1676 /* Don't scan past anything that might change control flow. */
1677 break;
1678 else if (arm_instruction_restores_sp (insn))
1679 {
1680 /* Don't scan past the epilogue. */
1681 break;
1682 }
1683 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1684 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1685 /* Ignore block loads from the stack, potentially copying
1686 parameters from memory. */
1687 continue;
1688 else if ((insn & 0xfc500000) == 0xe4100000
1689 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1690 /* Similarly ignore single loads from the stack. */
1691 continue;
1692 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1693 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1694 register instead of the stack. */
1695 continue;
1696 else
1697 {
1698 /* The optimizer might shove anything into the prologue, if
1699 we build up cache (cache != NULL) from scanning prologue,
1700 we just skip what we don't recognize and scan further to
1701 make cache as complete as possible. However, if we skip
1702 prologue, we'll stop immediately on unrecognized
1703 instruction. */
1704 unrecognized_pc = current_pc;
1705 if (cache != NULL)
1706 continue;
1707 else
1708 break;
1709 }
1710 }
1711
1712 if (unrecognized_pc == 0)
1713 unrecognized_pc = current_pc;
1714
1715 if (cache)
1716 {
1717 int framereg, framesize;
1718
1719 /* The frame size is just the distance from the frame register
1720 to the original stack pointer. */
1721 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1722 {
1723 /* Frame pointer is fp. */
1724 framereg = ARM_FP_REGNUM;
1725 framesize = -regs[ARM_FP_REGNUM].k;
1726 }
1727 else
1728 {
1729 /* Try the stack pointer... this is a bit desperate. */
1730 framereg = ARM_SP_REGNUM;
1731 framesize = -regs[ARM_SP_REGNUM].k;
1732 }
1733
1734 cache->framereg = framereg;
1735 cache->framesize = framesize;
1736
1737 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1738 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1739 cache->saved_regs[regno].addr = offset;
1740 }
1741
1742 if (arm_debug)
1743 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1744 paddress (gdbarch, unrecognized_pc));
1745
1746 do_cleanups (back_to);
1747 return unrecognized_pc;
1748 }
1749
/* Scan the prologue of the function containing THIS_FRAME's PC and
   fill CACHE with the frame register, frame size, and saved-register
   offsets it establishes.  CACHE is left with its "no frame" defaults
   (SP-based, size 0) when the prologue cannot be analyzed.  */

static void
arm_scan_prologue (struct frame_info *this_frame,
                   struct arm_prologue_cache *cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  CORE_ADDR prologue_start, prologue_end;
  CORE_ADDR prev_pc = get_frame_pc (this_frame);
  CORE_ADDR block_addr = get_frame_address_in_block (this_frame);

  /* Assume there is no frame until proven otherwise.  */
  cache->framereg = ARM_SP_REGNUM;
  cache->framesize = 0;

  /* Check for Thumb prologue.  */
  if (arm_frame_is_thumb (this_frame))
    {
      thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
      return;
    }

  /* Find the function prologue.  If we can't find the function in
     the symbol table, peek in the stack frame to find the PC.  */
  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
                                &prologue_end))
    {
      /* One way to find the end of the prologue (which works well
         for unoptimized code) is to do the following:

            struct symtab_and_line sal = find_pc_line (prologue_start, 0);

            if (sal.line == 0)
              prologue_end = prev_pc;
            else if (sal.end < prologue_end)
              prologue_end = sal.end;

         This mechanism is very accurate so long as the optimizer
         doesn't move any instructions from the function body into the
         prologue.  If this happens, sal.end will be the last
         instruction in the first hunk of prologue code just before
         the first instruction that the scheduler has moved from
         the body to the prologue.

         In order to make sure that we scan all of the prologue
         instructions, we use a slightly less accurate mechanism which
         may scan more than necessary.  To help compensate for this
         lack of accuracy, the prologue scanning loop below contains
         several clauses which'll cause the loop to terminate early if
         an implausible prologue instruction is encountered.

         The expression

              prologue_start + 64

         is a suitable endpoint since it accounts for the largest
         possible prologue plus up to five instructions inserted by
         the scheduler.  */

      if (prologue_end > prologue_start + 64)
        {
          prologue_end = prologue_start + 64;	/* See above.  */
        }
    }
  else
    {
      /* We have no symbol information.  Our only option is to assume this
         function has a standard stack frame and the normal frame register.
         Then, we can find the value of our frame pointer on entrance to
         the callee (or at the present moment if this is the innermost frame).
         The value stored there should be the address of the stmfd + 8.  */
      CORE_ADDR frame_loc;
      LONGEST return_value;

      frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
      if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
        return;
      else
        {
          /* Subtract 8 to undo the ARM pipeline PC offset mentioned
             above and land on the stmfd itself.  */
          prologue_start = gdbarch_addr_bits_remove
                             (gdbarch, return_value) - 8;
          prologue_end = prologue_start + 64;	/* See above.  */
        }
    }

  /* Never scan past the frame's current PC.  */
  if (prev_pc < prologue_end)
    prologue_end = prev_pc;

  arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}
1839
1840 static struct arm_prologue_cache *
1841 arm_make_prologue_cache (struct frame_info *this_frame)
1842 {
1843 int reg;
1844 struct arm_prologue_cache *cache;
1845 CORE_ADDR unwound_fp;
1846
1847 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1848 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1849
1850 arm_scan_prologue (this_frame, cache);
1851
1852 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1853 if (unwound_fp == 0)
1854 return cache;
1855
1856 cache->prev_sp = unwound_fp + cache->framesize;
1857
1858 /* Calculate actual addresses of saved registers using offsets
1859 determined by arm_scan_prologue. */
1860 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1861 if (trad_frame_addr_p (cache->saved_regs, reg))
1862 cache->saved_regs[reg].addr += cache->prev_sp;
1863
1864 return cache;
1865 }
1866
1867 /* Implementation of the stop_reason hook for arm_prologue frames. */
1868
1869 static enum unwind_stop_reason
1870 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1871 void **this_cache)
1872 {
1873 struct arm_prologue_cache *cache;
1874 CORE_ADDR pc;
1875
1876 if (*this_cache == NULL)
1877 *this_cache = arm_make_prologue_cache (this_frame);
1878 cache = (struct arm_prologue_cache *) *this_cache;
1879
1880 /* This is meant to halt the backtrace at "_start". */
1881 pc = get_frame_pc (this_frame);
1882 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1883 return UNWIND_OUTERMOST;
1884
1885 /* If we've hit a wall, stop. */
1886 if (cache->prev_sp == 0)
1887 return UNWIND_OUTERMOST;
1888
1889 return UNWIND_NO_REASON;
1890 }
1891
1892 /* Our frame ID for a normal frame is the current function's starting PC
1893 and the caller's SP when we were called. */
1894
1895 static void
1896 arm_prologue_this_id (struct frame_info *this_frame,
1897 void **this_cache,
1898 struct frame_id *this_id)
1899 {
1900 struct arm_prologue_cache *cache;
1901 struct frame_id id;
1902 CORE_ADDR pc, func;
1903
1904 if (*this_cache == NULL)
1905 *this_cache = arm_make_prologue_cache (this_frame);
1906 cache = (struct arm_prologue_cache *) *this_cache;
1907
1908 /* Use function start address as part of the frame ID. If we cannot
1909 identify the start address (due to missing symbol information),
1910 fall back to just using the current PC. */
1911 pc = get_frame_pc (this_frame);
1912 func = get_frame_func (this_frame);
1913 if (!func)
1914 func = pc;
1915
1916 id = frame_id_build (cache->prev_sp, func);
1917 *this_id = id;
1918 }
1919
1920 static struct value *
1921 arm_prologue_prev_register (struct frame_info *this_frame,
1922 void **this_cache,
1923 int prev_regnum)
1924 {
1925 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1926 struct arm_prologue_cache *cache;
1927
1928 if (*this_cache == NULL)
1929 *this_cache = arm_make_prologue_cache (this_frame);
1930 cache = (struct arm_prologue_cache *) *this_cache;
1931
1932 /* If we are asked to unwind the PC, then we need to return the LR
1933 instead. The prologue may save PC, but it will point into this
1934 frame's prologue, not the next frame's resume location. Also
1935 strip the saved T bit. A valid LR may have the low bit set, but
1936 a valid PC never does. */
1937 if (prev_regnum == ARM_PC_REGNUM)
1938 {
1939 CORE_ADDR lr;
1940
1941 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1942 return frame_unwind_got_constant (this_frame, prev_regnum,
1943 arm_addr_bits_remove (gdbarch, lr));
1944 }
1945
1946 /* SP is generally not saved to the stack, but this frame is
1947 identified by the next frame's stack pointer at the time of the call.
1948 The value was already reconstructed into PREV_SP. */
1949 if (prev_regnum == ARM_SP_REGNUM)
1950 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1951
1952 /* The CPSR may have been changed by the call instruction and by the
1953 called function. The only bit we can reconstruct is the T bit,
1954 by checking the low bit of LR as of the call. This is a reliable
1955 indicator of Thumb-ness except for some ARM v4T pre-interworking
1956 Thumb code, which could get away with a clear low bit as long as
1957 the called function did not use bx. Guess that all other
1958 bits are unchanged; the condition flags are presumably lost,
1959 but the processor status is likely valid. */
1960 if (prev_regnum == ARM_PS_REGNUM)
1961 {
1962 CORE_ADDR lr, cpsr;
1963 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1964
1965 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1966 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1967 if (IS_THUMB_ADDR (lr))
1968 cpsr |= t_bit;
1969 else
1970 cpsr &= ~t_bit;
1971 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1972 }
1973
1974 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1975 prev_regnum);
1976 }
1977
/* Unwinder based on prologue analysis.  Registered with
   default_frame_sniffer, so it serves as the fall-back for normal
   frames when no higher-priority unwinder (DWARF CFI, exception
   tables) claims the frame.  It shares its this_id / prev_register
   hooks with the exception-table unwinder below.  */

struct frame_unwind arm_prologue_unwind = {
  NORMAL_FRAME,
  arm_prologue_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  default_frame_sniffer
};
1986
/* Maintain a list of ARM exception table entries per objfile, similar to the
   list of mapping symbols.  We only cache entries for standard ARM-defined
   personality routines; the cache will contain only the frame unwinding
   instructions associated with the entry (not the descriptors).  */

/* Per-objfile registry key under which the decoded exception-table
   cache (struct arm_exidx_data) is stored; see arm_exidx_new_objfile.  */
static const struct objfile_data *arm_exidx_data_key;
1993
/* One decoded exception-index entry: the section-relative start address
   of the function it covers, plus the normalized unwind instruction
   bytes.  ENTRY may be NULL when the index provides no instructions for
   the region (e.g. an EXIDX_CANTUNWIND marker).  */
struct arm_exidx_entry
{
  bfd_vma addr;
  gdb_byte *entry;
};
typedef struct arm_exidx_entry arm_exidx_entry_s;
DEF_VEC_O(arm_exidx_entry_s);

/* Per-objfile cache: one vector of entries per BFD section, indexed by
   the section's index, sorted by increasing ADDR.  */
struct arm_exidx_data
{
  VEC(arm_exidx_entry_s) **section_maps;
};
2006
2007 static void
2008 arm_exidx_data_free (struct objfile *objfile, void *arg)
2009 {
2010 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
2011 unsigned int i;
2012
2013 for (i = 0; i < objfile->obfd->section_count; i++)
2014 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2015 }
2016
2017 static inline int
2018 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2019 const struct arm_exidx_entry *rhs)
2020 {
2021 return lhs->addr < rhs->addr;
2022 }
2023
2024 static struct obj_section *
2025 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2026 {
2027 struct obj_section *osect;
2028
2029 ALL_OBJFILE_OSECTIONS (objfile, osect)
2030 if (bfd_get_section_flags (objfile->obfd,
2031 osect->the_bfd_section) & SEC_ALLOC)
2032 {
2033 bfd_vma start, size;
2034 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2035 size = bfd_get_section_size (osect->the_bfd_section);
2036
2037 if (start <= vma && vma < start + size)
2038 return osect;
2039 }
2040
2041 return NULL;
2042 }
2043
/* Parse contents of exception table and exception index sections
   of OBJFILE, and fill in the exception table entry cache.

   For each entry that refers to a standard ARM-defined personality
   routine, extract the frame unwinding instructions (from either
   the index or the table section).  The unwinding instructions
   are normalized by:
   - extracting them from the rest of the table data
   - converting to host endianness
   - appending the implicit 0xb0 ("Finish") code

   The extracted and normalized instructions are stored for later
   retrieval by the arm_find_exidx_entry routine.  */

static void
arm_exidx_new_objfile (struct objfile *objfile)
{
  struct cleanup *cleanups;
  struct arm_exidx_data *data;
  asection *exidx, *extab;
  bfd_vma exidx_vma = 0, extab_vma = 0;
  bfd_size_type exidx_size = 0, extab_size = 0;
  gdb_byte *exidx_data = NULL, *extab_data = NULL;
  LONGEST i;

  /* If we've already touched this file, do nothing.  */
  if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
    return;
  cleanups = make_cleanup (null_cleanup, NULL);

  /* Read contents of exception table and index.  Either section may be
     absent; the corresponding *_size stays 0 and the loops below are
     skipped naturally.  */
  exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
  if (exidx)
    {
      exidx_vma = bfd_section_vma (objfile->obfd, exidx);
      exidx_size = bfd_get_section_size (exidx);
      exidx_data = (gdb_byte *) xmalloc (exidx_size);
      make_cleanup (xfree, exidx_data);

      if (!bfd_get_section_contents (objfile->obfd, exidx,
				     exidx_data, 0, exidx_size))
	{
	  do_cleanups (cleanups);
	  return;
	}
    }

  extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
  if (extab)
    {
      extab_vma = bfd_section_vma (objfile->obfd, extab);
      extab_size = bfd_get_section_size (extab);
      extab_data = (gdb_byte *) xmalloc (extab_size);
      make_cleanup (xfree, extab_data);

      if (!bfd_get_section_contents (objfile->obfd, extab,
				     extab_data, 0, extab_size))
	{
	  do_cleanups (cleanups);
	  return;
	}
    }

  /* Allocate exception table data structure.  Registering the (still
     empty) cache up front also marks this objfile as "touched".  */
  data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
  set_objfile_data (objfile, arm_exidx_data_key, data);
  data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
				       objfile->obfd->section_count,
				       VEC(arm_exidx_entry_s) *);

  /* Fill in exception table.  Each index entry is a pair of 32-bit
     words: a self-relative function address and either inline unwind
     data or a reference into .ARM.extab.  */
  for (i = 0; i < exidx_size / 8; i++)
    {
      struct arm_exidx_entry new_exidx_entry;
      bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
      bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
      bfd_vma addr = 0, word = 0;
      int n_bytes = 0, n_words = 0;
      struct obj_section *sec;
      gdb_byte *entry = NULL;

      /* Extract address of start of function: sign-extend the 31-bit
	 self-relative offset and rebase it on the entry's own VMA.  */
      idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
      idx += exidx_vma + i * 8;

      /* Find section containing function and compute section offset.  */
      sec = arm_obj_section_from_vma (objfile, idx);
      if (sec == NULL)
	continue;
      idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);

      /* Determine address of exception table entry.  */
      if (val == 1)
	{
	  /* EXIDX_CANTUNWIND -- no exception table entry present.
	     ENTRY stays NULL.  */
	}
      else if ((val & 0xff000000) == 0x80000000)
	{
	  /* Exception table entry embedded in .ARM.exidx
	     -- must be short form: three instruction bytes in VAL.  */
	  word = val;
	  n_bytes = 3;
	}
      else if (!(val & 0x80000000))
	{
	  /* Exception table entry in .ARM.extab.  VAL is another
	     sign-extended 31-bit self-relative offset.  */
	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
	  addr += exidx_vma + i * 8 + 4;

	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data + addr - extab_vma);
	      addr += 4;

	      if ((word & 0xff000000) == 0x80000000)
		{
		  /* Short form: three instruction bytes in WORD.  */
		  n_bytes = 3;
		}
	      else if ((word & 0xff000000) == 0x81000000
		       || (word & 0xff000000) == 0x82000000)
		{
		  /* Long form: two bytes in WORD plus a word count in
		     its second byte.  */
		  n_bytes = 2;
		  n_words = ((word >> 16) & 0xff);
		}
	      else if (!(word & 0x80000000))
		{
		  bfd_vma pers;
		  struct obj_section *pers_sec;
		  int gnu_personality = 0;

		  /* Custom personality routine: WORD is a self-relative
		     pointer to it (Thumb bit stripped below).  */
		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);

		  /* Check whether we've got one of the variants of the
		     GNU personality routines.  */
		  pers_sec = arm_obj_section_from_vma (objfile, pers);
		  if (pers_sec)
		    {
		      static const char *personality[] =
			{
			  "__gcc_personality_v0",
			  "__gxx_personality_v0",
			  "__gcj_personality_v0",
			  "__gnu_objc_personality_v0",
			  NULL
			};

		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
		      int k;

		      for (k = 0; personality[k]; k++)
			if (lookup_minimal_symbol_by_pc_name
			      (pc, personality[k], objfile))
			  {
			    gnu_personality = 1;
			    break;
			  }
		    }

		  /* If so, the next word contains a word count in the high
		     byte, followed by the same unwind instructions as the
		     pre-defined forms.  */
		  if (gnu_personality
		      && addr + 4 <= extab_vma + extab_size)
		    {
		      word = bfd_h_get_32 (objfile->obfd,
					   extab_data + addr - extab_vma);
		      addr += 4;
		      n_bytes = 3;
		      n_words = ((word >> 24) & 0xff);
		    }
		}
	    }
	}

      /* Sanity check address: drop the instructions rather than read
	 outside the .ARM.extab contents we loaded.  */
      if (n_words)
	if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
	  n_words = n_bytes = 0;

      /* The unwind instructions reside in WORD (only the N_BYTES least
	 significant bytes are valid), followed by N_WORDS words in the
	 extab section starting at ADDR.  */
      if (n_bytes || n_words)
	{
	  gdb_byte *p = entry
	    = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
					  n_bytes + n_words * 4 + 1);

	  /* Copy the valid bytes of WORD, most significant first.  */
	  while (n_bytes--)
	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);

	  /* Then the extab words, converted to host byte order.  */
	  while (n_words--)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data + addr - extab_vma);
	      addr += 4;

	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
	      *p++ = (gdb_byte) (word & 0xff);
	    }

	  /* Implied "Finish" to terminate the list.  */
	  *p++ = 0xb0;
	}

      /* Push entry onto vector.  They are guaranteed to always
	 appear in order of increasing addresses.  */
      new_exidx_entry.addr = idx;
      new_exidx_entry.entry = entry;
      VEC_safe_push (arm_exidx_entry_s,
		     data->section_maps[sec->the_bfd_section->index],
		     &new_exidx_entry);
    }

  do_cleanups (cleanups);
}
2267
/* Search for the exception table entry covering MEMADDR.  If one is found,
   return a pointer to its data.  Otherwise, return 0.  If START is non-NULL,
   set *START to the start of the region covered by this entry.  */

static gdb_byte *
arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_exidx_data *data;
      VEC(arm_exidx_entry_s) *map;
      /* Cached entries are keyed by section-relative addresses.  */
      struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
      unsigned int idx;

      data = ((struct arm_exidx_data *)
	      objfile_data (sec->objfile, arm_exidx_data_key));
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_exidx_entry_s, map))
	    {
	      struct arm_exidx_entry *map_sym;

	      idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
				     arm_compare_exidx_entries);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 exception table entry covers this address.  */
	      if (idx < VEC_length (arm_exidx_entry_s, map))
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx);
		  if (map_sym->addr == map_key.addr)
		    {
		      if (start)
			*start = map_sym->addr + obj_section_addr (sec);
		      return map_sym->entry;
		    }
		}

	      /* Fall back to the entry immediately before the
		 insertion point, whose region extends up to MEMADDR.  */
	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
		  if (start)
		    *start = map_sym->addr + obj_section_addr (sec);
		  return map_sym->entry;
		}
	    }
	}
    }

  return NULL;
}
2325
/* Given the current frame THIS_FRAME, and its associated frame unwinding
   instruction list from the ARM exception table entry ENTRY, allocate and
   return a prologue cache structure describing how to unwind this frame.

   This is an interpreter for the EHABI unwind bytecode: VSP is the
   "virtual stack pointer" the instructions manipulate, and each "pop"
   records the stack address a register was saved at into
   CACHE->saved_regs rather than actually restoring it.

   Return NULL if the unwinding instruction list contains a "spare",
   "reserved" or "refuse to unwind" instruction as defined in section
   "9.3 Frame unwinding instructions" of the "Exception Handling ABI
   for the ARM Architecture" document.  */

static struct arm_prologue_cache *
arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
{
  CORE_ADDR vsp = 0;
  int vsp_valid = 0;

  struct arm_prologue_cache *cache;
  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  for (;;)
    {
      gdb_byte insn;

      /* Whenever we reload SP, we actually have to retrieve its
	 actual value in the current frame.  */
      if (!vsp_valid)
	{
	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
	    {
	      /* SP currently lives in a register of this frame.  */
	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
	      vsp = get_frame_register_unsigned (this_frame, reg);
	    }
	  else
	    {
	      /* SP was saved on the stack; read it from there.  */
	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
	    }

	  vsp_valid = 1;
	}

      /* Decode next unwind instruction.  */
      insn = *entry++;

      if ((insn & 0xc0) == 0)
	{
	  /* 00xxxxxx: vsp += (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp += (offset << 2) + 4;
	}
      else if ((insn & 0xc0) == 0x40)
	{
	  /* 01xxxxxx: vsp -= (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp -= (offset << 2) + 4;
	}
      else if ((insn & 0xf0) == 0x80)
	{
	  /* 1000xxxx xxxxxxxx: pop r4..r15 under a 12-bit mask.  */
	  int mask = ((insn & 0xf) << 8) | *entry++;
	  int i;

	  /* The special case of an all-zero mask identifies
	     "Refuse to unwind".  We return NULL to fall back
	     to the prologue analyzer.  */
	  if (mask == 0)
	    return NULL;

	  /* Pop registers r4..r15 under mask.  */
	  for (i = 0; i < 12; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[4 + i].addr = vsp;
		vsp += 4;
	      }

	  /* Special-case popping SP -- we need to reload vsp.  */
	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
	    vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0x90)
	{
	  /* 1001xxxx: set vsp = r[xxxx].  */
	  int reg = insn & 0xf;

	  /* Reserved cases.  */
	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
	    return NULL;

	  /* Set SP from another register and mark VSP for reload.  */
	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
	  vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0xa0)
	{
	  /* 1010xxxx: pop r4..r[4+count], optionally followed by LR.  */
	  int count = insn & 0x7;
	  int pop_lr = (insn & 0x8) != 0;
	  int i;

	  /* Pop r4..r[4+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[4 + i].addr = vsp;
	      vsp += 4;
	    }

	  /* If indicated by flag, pop LR as well.  */
	  if (pop_lr)
	    {
	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
	      vsp += 4;
	    }
	}
      else if (insn == 0xb0)
	{
	  /* 10110000: "Finish" -- end of the instruction list.  */
	  /* We could only have updated PC by popping into it; if so, it
	     will show up as address.  Otherwise, copy LR into PC.  */
	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
	    cache->saved_regs[ARM_PC_REGNUM]
	      = cache->saved_regs[ARM_LR_REGNUM];

	  /* We're done.  */
	  break;
	}
      else if (insn == 0xb1)
	{
	  /* 10110001: pop r0..r3 under mask.  */
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop r0..r3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[i].addr = vsp;
		vsp += 4;
	      }
	}
      else if (insn == 0xb2)
	{
	  /* 10110010: vsp += 0x204 + (ULEB128 << 2).  */
	  ULONGEST offset = 0;
	  unsigned shift = 0;

	  do
	    {
	      offset |= (*entry & 0x7f) << shift;
	      shift += 7;
	    }
	  while (*entry++ & 0x80);

	  vsp += 0x204 + (offset << 2);
	}
      else if (insn == 0xb3)
	{
	  /* 10110011: pop VFP registers saved with FSTMFDX.  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D15 are valid here.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if ((insn & 0xf8) == 0xb8)
	{
	  /* 10111nnn: pop VFP registers D8..D[8+nnn] (FSTMFDX form).  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if (insn == 0xc6)
	{
	  /* 11000110: pop iWMMXt registers WR[start]..WR[start+count].  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers WR0..WR15 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc7)
	{
	  /* 11000111: pop iWMMXt control registers under mask.  */
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
		vsp += 4;
	      }
	}
      else if ((insn & 0xf8) == 0xc0)
	{
	  /* 11000nnn: pop iWMMXt registers WR10..WR[10+nnn].  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop iwmmx registers WR[10]..WR[10+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc8)
	{
	  /* 11001000: pop VFP registers D16..D31 range (VSTM form).  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D31 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers
	     D[16+start]..D[16+start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc9)
	{
	  /* 11001001: pop VFP registers D[start]..D[start+count]
	     (VSTM form, no trailing pad word).  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if ((insn & 0xf8) == 0xd0)
	{
	  /* 11010nnn: pop VFP registers D8..D[8+nnn] (VSTM form).  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else
	{
	  /* Everything else is "spare".  */
	  return NULL;
	}
    }

  /* If we restore SP from a register, assume this was the frame register.
     Otherwise just fall back to SP as frame register.  */
  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
    cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
  else
    cache->framereg = ARM_SP_REGNUM;

  /* Determine offset to previous frame.  */
  cache->framesize
    = vsp - get_frame_register_unsigned (this_frame, cache->framereg);

  /* We already got the previous SP.  */
  cache->prev_sp = vsp;

  return cache;
}
2624
/* Unwinding via ARM exception table entries.  Note that the sniffer
   already computes a filled-in prologue cache, which is then used
   with the same arm_prologue_this_id and arm_prologue_prev_register
   routines also used for prologue-parsing based unwinding.

   Return 1 (claiming the frame) only when the exception-table entry
   covering the frame's PC is trusted and decodes successfully;
   otherwise return 0 to let the prologue analyzer handle it.  */

static int
arm_exidx_unwind_sniffer (const struct frame_unwind *self,
			  struct frame_info *this_frame,
			  void **this_prologue_cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR addr_in_block, exidx_region, func_start;
  struct arm_prologue_cache *cache;
  gdb_byte *entry;

  /* See if we have an ARM exception table entry covering this address.  */
  addr_in_block = get_frame_address_in_block (this_frame);
  entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
  if (!entry)
    return 0;

  /* The ARM exception table does not describe unwind information
     for arbitrary PC values, but is guaranteed to be correct only
     at call sites.  We have to decide here whether we want to use
     ARM exception table information for this frame, or fall back
     to using prologue parsing.  (Note that if we have DWARF CFI,
     this sniffer isn't even called -- CFI is always preferred.)

     Before we make this decision, however, we check whether we
     actually have *symbol* information for the current frame.
     If not, prologue parsing would not work anyway, so we might
     as well use the exception table and hope for the best.  */
  if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
    {
      int exc_valid = 0;

      /* If the next frame is "normal", we are at a call site in this
	 frame, so exception information is guaranteed to be valid.  */
      if (get_next_frame (this_frame)
	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
	exc_valid = 1;

      /* We also assume exception information is valid if we're currently
	 blocked in a system call.  The system library is supposed to
	 ensure this, so that e.g. pthread cancellation works.  */
      if (arm_frame_is_thumb (this_frame))
	{
	  LONGEST insn;

	  /* Check for a 16-bit Thumb "svc" immediately before PC.  */
	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
					byte_order_for_code, &insn)
	      && (insn & 0xff00) == 0xdf00 /* svc */)
	    exc_valid = 1;
	}
      else
	{
	  LONGEST insn;

	  /* Check for a 32-bit ARM "svc" immediately before PC.  */
	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
					byte_order_for_code, &insn)
	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
	    exc_valid = 1;
	}

      /* Bail out if we don't know that exception information is valid.  */
      if (!exc_valid)
	return 0;

      /* The ARM exception index does not mark the *end* of the region
	 covered by the entry, and some functions will not have any entry.
	 To correctly recognize the end of the covered region, the linker
	 should have inserted dummy records with a CANTUNWIND marker.

	 Unfortunately, current versions of GNU ld do not reliably do
	 this, and thus we may have found an incorrect entry above.
	 As a (temporary) sanity check, we only use the entry if it
	 lies *within* the bounds of the function.  Note that this check
	 might reject perfectly valid entries that just happen to cover
	 multiple functions; therefore this check ought to be removed
	 once the linker is fixed.  */
      if (func_start > exidx_region)
	return 0;
    }

  /* Decode the list of unwinding instructions into a prologue cache.
     Note that this may fail due to e.g. a "refuse to unwind" code.  */
  cache = arm_exidx_fill_cache (this_frame, entry);
  if (!cache)
    return 0;

  *this_prologue_cache = cache;
  return 1;
}
2719
/* Unwinder for frames covered by an ARM exception table (.ARM.exidx)
   entry.  The sniffer fills in the prologue cache itself, so the
   this_id and prev_register hooks are shared with the prologue-based
   unwinder above.  */

struct frame_unwind arm_exidx_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_exidx_unwind_sniffer
};
2728
2729 static struct arm_prologue_cache *
2730 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2731 {
2732 struct arm_prologue_cache *cache;
2733 int reg;
2734
2735 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2736 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2737
2738 /* Still rely on the offset calculated from prologue. */
2739 arm_scan_prologue (this_frame, cache);
2740
2741 /* Since we are in epilogue, the SP has been restored. */
2742 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2743
2744 /* Calculate actual addresses of saved registers using offsets
2745 determined by arm_scan_prologue. */
2746 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2747 if (trad_frame_addr_p (cache->saved_regs, reg))
2748 cache->saved_regs[reg].addr += cache->prev_sp;
2749
2750 return cache;
2751 }
2752
2753 /* Implementation of function hook 'this_id' in
2754 'struct frame_uwnind' for epilogue unwinder. */
2755
2756 static void
2757 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2758 void **this_cache,
2759 struct frame_id *this_id)
2760 {
2761 struct arm_prologue_cache *cache;
2762 CORE_ADDR pc, func;
2763
2764 if (*this_cache == NULL)
2765 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2766 cache = (struct arm_prologue_cache *) *this_cache;
2767
2768 /* Use function start address as part of the frame ID. If we cannot
2769 identify the start address (due to missing symbol information),
2770 fall back to just using the current PC. */
2771 pc = get_frame_pc (this_frame);
2772 func = get_frame_func (this_frame);
2773 if (func == 0)
2774 func = pc;
2775
2776 (*this_id) = frame_id_build (cache->prev_sp, pc);
2777 }
2778
2779 /* Implementation of function hook 'prev_register' in
2780 'struct frame_uwnind' for epilogue unwinder. */
2781
2782 static struct value *
2783 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2784 void **this_cache, int regnum)
2785 {
2786 if (*this_cache == NULL)
2787 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2788
2789 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2790 }
2791
/* Forward declarations for the epilogue detectors used by the sniffer
   below; being static, their definitions appear later in this file.  */
static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
					  CORE_ADDR pc);
static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
					  CORE_ADDR pc);
2796
2797 /* Implementation of function hook 'sniffer' in
2798 'struct frame_uwnind' for epilogue unwinder. */
2799
2800 static int
2801 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2802 struct frame_info *this_frame,
2803 void **this_prologue_cache)
2804 {
2805 if (frame_relative_level (this_frame) == 0)
2806 {
2807 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2808 CORE_ADDR pc = get_frame_pc (this_frame);
2809
2810 if (arm_frame_is_thumb (this_frame))
2811 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2812 else
2813 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2814 }
2815 else
2816 return 0;
2817 }
2818
/* Frame unwinder for frames stopped in a function epilogue.  Sniffed
   only for the innermost frame; see arm_epilogue_frame_sniffer.  */

static const struct frame_unwind arm_epilogue_frame_unwind =
{
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_epilogue_frame_this_id,
  arm_epilogue_frame_prev_register,
  NULL,
  arm_epilogue_frame_sniffer,
};
2830
2831 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2832 trampoline, return the target PC. Otherwise return 0.
2833
2834 void call0a (char c, short s, int i, long l) {}
2835
2836 int main (void)
2837 {
2838 (*pointer_to_call0a) (c, s, i, l);
2839 }
2840
2841 Instead of calling a stub library function _call_via_xx (xx is
2842 the register name), GCC may inline the trampoline in the object
2843 file as below (register r2 has the address of call0a).
2844
2845 .global main
2846 .type main, %function
2847 ...
2848 bl .L1
2849 ...
2850 .size main, .-main
2851
2852 .L1:
2853 bx r2
2854
2855 The trampoline 'bx r2' doesn't belong to main. */
2856
2857 static CORE_ADDR
2858 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2859 {
2860 /* The heuristics of recognizing such trampoline is that FRAME is
2861 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2862 if (arm_frame_is_thumb (frame))
2863 {
2864 gdb_byte buf[2];
2865
2866 if (target_read_memory (pc, buf, 2) == 0)
2867 {
2868 struct gdbarch *gdbarch = get_frame_arch (frame);
2869 enum bfd_endian byte_order_for_code
2870 = gdbarch_byte_order_for_code (gdbarch);
2871 uint16_t insn
2872 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2873
2874 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2875 {
2876 CORE_ADDR dest
2877 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2878
2879 /* Clear the LSB so that gdb core sets step-resume
2880 breakpoint at the right address. */
2881 return UNMAKE_THUMB_ADDR (dest);
2882 }
2883 }
2884 }
2885
2886 return 0;
2887 }
2888
2889 static struct arm_prologue_cache *
2890 arm_make_stub_cache (struct frame_info *this_frame)
2891 {
2892 struct arm_prologue_cache *cache;
2893
2894 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2895 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2896
2897 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2898
2899 return cache;
2900 }
2901
2902 /* Our frame ID for a stub frame is the current SP and LR. */
2903
2904 static void
2905 arm_stub_this_id (struct frame_info *this_frame,
2906 void **this_cache,
2907 struct frame_id *this_id)
2908 {
2909 struct arm_prologue_cache *cache;
2910
2911 if (*this_cache == NULL)
2912 *this_cache = arm_make_stub_cache (this_frame);
2913 cache = (struct arm_prologue_cache *) *this_cache;
2914
2915 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2916 }
2917
2918 static int
2919 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2920 struct frame_info *this_frame,
2921 void **this_prologue_cache)
2922 {
2923 CORE_ADDR addr_in_block;
2924 gdb_byte dummy[4];
2925 CORE_ADDR pc, start_addr;
2926 const char *name;
2927
2928 addr_in_block = get_frame_address_in_block (this_frame);
2929 pc = get_frame_pc (this_frame);
2930 if (in_plt_section (addr_in_block)
2931 /* We also use the stub winder if the target memory is unreadable
2932 to avoid having the prologue unwinder trying to read it. */
2933 || target_read_memory (pc, dummy, 4) != 0)
2934 return 1;
2935
2936 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2937 && arm_skip_bx_reg (this_frame, pc) != 0)
2938 return 1;
2939
2940 return 0;
2941 }
2942
/* Unwinder for stub frames: PLT entries, unreadable code, and inlined
   "bx <Rm>" trampolines (see arm_stub_unwind_sniffer).  Register
   unwinding is shared with the prologue unwinder.  */

struct frame_unwind arm_stub_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_stub_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_stub_unwind_sniffer
};
2951
/* Put here the code to store, into CACHE->saved_regs, the addresses
   of the saved registers of frame described by THIS_FRAME.  CACHE is
   returned.  */

static struct arm_prologue_cache *
arm_m_exception_cache (struct frame_info *this_frame)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  struct arm_prologue_cache *cache;
  CORE_ADDR unwound_sp;
  LONGEST xpsr;

  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  unwound_sp = get_frame_register_unsigned (this_frame,
					    ARM_SP_REGNUM);

  /* The hardware saves eight 32-bit words, comprising xPSR,
     ReturnAddress, LR (R14), R12, R3, R2, R1, R0.  See details in
     "B1.5.6 Exception entry behavior" in
     "ARMv7-M Architecture Reference Manual".  The offsets below are
     the positions of each register in that fixed stack frame, from
     lowest address (R0) to highest (xPSR).  */
  cache->saved_regs[0].addr = unwound_sp;
  cache->saved_regs[1].addr = unwound_sp + 4;
  cache->saved_regs[2].addr = unwound_sp + 8;
  cache->saved_regs[3].addr = unwound_sp + 12;
  cache->saved_regs[12].addr = unwound_sp + 16;
  cache->saved_regs[14].addr = unwound_sp + 20;
  cache->saved_regs[15].addr = unwound_sp + 24;
  cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;

  /* If bit 9 of the saved xPSR is set, then there is a four-byte
     aligner between the top of the 32-byte stack frame and the
     previous context's stack pointer.  */
  cache->prev_sp = unwound_sp + 32;
  if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
      && (xpsr & (1 << 9)) != 0)
    cache->prev_sp += 4;

  return cache;
}
2994
/* Implementation of the 'this_id' function hook in
   'struct frame_unwind' for the M-profile exception unwinder.  */

static void
arm_m_exception_this_id (struct frame_info *this_frame,
			 void **this_cache,
			 struct frame_id *this_id)
{
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_m_exception_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* Our frame ID for an exception frame is built from the unwound SP
     and the current PC.  */
  *this_id = frame_id_build (cache->prev_sp,
			     get_frame_pc (this_frame));
}
3013
3014 /* Implementation of function hook 'prev_register' in
3015 'struct frame_uwnind'. */
3016
3017 static struct value *
3018 arm_m_exception_prev_register (struct frame_info *this_frame,
3019 void **this_cache,
3020 int prev_regnum)
3021 {
3022 struct arm_prologue_cache *cache;
3023
3024 if (*this_cache == NULL)
3025 *this_cache = arm_m_exception_cache (this_frame);
3026 cache = (struct arm_prologue_cache *) *this_cache;
3027
3028 /* The value was already reconstructed into PREV_SP. */
3029 if (prev_regnum == ARM_SP_REGNUM)
3030 return frame_unwind_got_constant (this_frame, prev_regnum,
3031 cache->prev_sp);
3032
3033 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3034 prev_regnum);
3035 }
3036
/* Implementation of the 'sniffer' hook of 'struct frame_unwind' for
   M-profile exception frames.  */

static int
arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
				struct frame_info *this_frame,
				void **this_prologue_cache)
{
  /* No need to check is_m; this sniffer is only registered for
     M-profile architectures.  A frame is claimed by this unwinder
     exactly when it resumes at one of the magic exception-return
     PC values.  */
  return arm_m_addr_is_magic (get_frame_pc (this_frame));
}
3053
/* Frame unwinder for M-profile exceptions.  Classified as a
   SIGTRAMP_FRAME because the register context was pushed by the
   processor, not by the program.  Selected by the sniffer, which
   matches the magic exception-return PC values.  */

struct frame_unwind arm_m_exception_unwind =
{
  SIGTRAMP_FRAME,
  default_frame_unwind_stop_reason,
  arm_m_exception_this_id,
  arm_m_exception_prev_register,
  NULL,
  arm_m_exception_unwind_sniffer
};
3065
3066 static CORE_ADDR
3067 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3068 {
3069 struct arm_prologue_cache *cache;
3070
3071 if (*this_cache == NULL)
3072 *this_cache = arm_make_prologue_cache (this_frame);
3073 cache = (struct arm_prologue_cache *) *this_cache;
3074
3075 return cache->prev_sp - cache->framesize;
3076 }
3077
/* Frame base for frames handled by the prologue-analysis unwinder.
   All three base kinds (frame base, locals, args) use the same
   address.  */

struct frame_base arm_normal_base = {
  &arm_prologue_unwind,
  arm_normal_frame_base,
  arm_normal_frame_base,
  arm_normal_frame_base
};
3084
/* Assuming THIS_FRAME is a dummy, return the frame ID of that
   dummy frame.  The frame ID's base needs to match the TOS value
   saved by save_dummy_frame_tos() and returned from
   arm_push_dummy_call, and the PC needs to match the dummy frame's
   breakpoint.  */

static struct frame_id
arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
{
  /* Pair the current SP with the resume PC to identify the dummy.  */
  return frame_id_build (get_frame_register_unsigned (this_frame,
						      ARM_SP_REGNUM),
			 get_frame_pc (this_frame));
}
3098
3099 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3100 be used to construct the previous frame's ID, after looking up the
3101 containing function). */
3102
3103 static CORE_ADDR
3104 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3105 {
3106 CORE_ADDR pc;
3107 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3108 return arm_addr_bits_remove (gdbarch, pc);
3109 }
3110
3111 static CORE_ADDR
3112 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3113 {
3114 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3115 }
3116
/* DWARF2_FRAME_REG_FN handler installed by arm_dwarf2_frame_init_reg:
   PC and CPSR cannot be taken directly from the DWARF return column,
   because the saved LR value may carry a Thumb-state bit that is not
   part of the PC.  Only ARM_PC_REGNUM and ARM_PS_REGNUM are ever
   passed here; anything else is an internal error.  */

static struct value *
arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
			  int regnum)
{
  struct gdbarch * gdbarch = get_frame_arch (this_frame);
  CORE_ADDR lr, cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

  switch (regnum)
    {
    case ARM_PC_REGNUM:
      /* The PC is normally copied from the return column, which
	 describes saves of LR.  However, that version may have an
	 extra bit set to indicate Thumb state.  The bit is not
	 part of the PC.  */
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, regnum,
					arm_addr_bits_remove (gdbarch, lr));

    case ARM_PS_REGNUM:
      /* Reconstruct the T bit; see arm_prologue_prev_register for details.  */
      cpsr = get_frame_register_unsigned (this_frame, regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
	cpsr |= t_bit;
      else
	cpsr &= ~t_bit;
      return frame_unwind_got_constant (this_frame, regnum, cpsr);

    default:
      internal_error (__FILE__, __LINE__,
		      _("Unexpected register %d"), regnum);
    }
}
3151
3152 static void
3153 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3154 struct dwarf2_frame_state_reg *reg,
3155 struct frame_info *this_frame)
3156 {
3157 switch (regnum)
3158 {
3159 case ARM_PC_REGNUM:
3160 case ARM_PS_REGNUM:
3161 reg->how = DWARF2_FRAME_REG_FN;
3162 reg->loc.fn = arm_dwarf2_prev_register;
3163 break;
3164 case ARM_SP_REGNUM:
3165 reg->how = DWARF2_FRAME_REG_CFA;
3166 break;
3167 }
3168 }
3169
/* Implement the stack_frame_destroyed_p gdbarch method for Thumb
   code: return non-zero if PC appears to lie within a function
   epilogue (i.e. after the stack frame has been at least partially
   destroyed).  */

static int
thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn, insn2;
  int found_return = 0, found_stack_adjust = 0;
  CORE_ADDR func_start, func_end;
  CORE_ADDR scan_pc;
  gdb_byte buf[4];

  /* Without function bounds we cannot scan; report "not destroyed".  */
  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* The epilogue is a sequence of instructions along the following lines:

    - add stack frame size to SP or FP
    - [if frame pointer used] restore SP from FP
    - restore registers from SP [may include PC]
    - a return-type instruction [if PC wasn't already restored]

    In a first pass, we scan forward from the current PC and verify the
    instructions we find as compatible with this sequence, ending in a
    return instruction.

    However, this is not sufficient to distinguish indirect function calls
    within a function from indirect tail calls in the epilogue in some cases.
    Therefore, if we didn't already find any SP-changing instruction during
    forward scan, we add a backward scanning heuristic to ensure we actually
    are in the epilogue.  */

  scan_pc = pc;
  while (scan_pc < func_end && !found_return)
    {
      if (target_read_memory (scan_pc, buf, 2))
	break;

      scan_pc += 2;
      insn = extract_unsigned_integer (buf, 2, byte_order_for_code);

      if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
	found_return = 1;
      else if (insn == 0x46f7)  /* mov pc, lr */
	found_return = 1;
      else if (thumb_instruction_restores_sp (insn))
	{
	  if ((insn & 0xff00) == 0xbd00)  /* pop <registers, PC> */
	    found_return = 1;
	}
      else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
	{
	  /* Fetch the second halfword of the 32-bit encoding.  */
	  if (target_read_memory (scan_pc, buf, 2))
	    break;

	  scan_pc += 2;
	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);

	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
	    {
	      if (insn2 & 0x8000)  /* <registers> include PC.  */
		found_return = 1;
	    }
	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
		   && (insn2 & 0x0fff) == 0x0b04)
	    {
	      if ((insn2 & 0xf000) == 0xf000)  /* <Rt> is PC.  */
		found_return = 1;
	    }
	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
		   && (insn2 & 0x0e00) == 0x0a00)
	    ;  /* VFP pop is epilogue-compatible; keep scanning.  */
	  else
	    break;
	}
      else
	break;  /* Any other instruction cannot be part of an epilogue.  */
    }

  if (!found_return)
    return 0;

  /* Since any instruction in the epilogue sequence, with the possible
     exception of return itself, updates the stack pointer, we need to
     scan backwards for at most one instruction.  Try either a 16-bit or
     a 32-bit instruction.  This is just a heuristic, so we do not worry
     too much about false positives.  */

  if (pc - 4 < func_start)
    return 0;
  if (target_read_memory (pc - 4, buf, 4))
    return 0;

  /* Decode the 4 bytes both as a trailing 16-bit insn (insn2) and as
     a possible 32-bit insn (insn:insn2).  */
  insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
  insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);

  if (thumb_instruction_restores_sp (insn2))
    found_stack_adjust = 1;
  else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
    found_stack_adjust = 1;
  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
	   && (insn2 & 0x0fff) == 0x0b04)
    found_stack_adjust = 1;
  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
	   && (insn2 & 0x0e00) == 0x0a00)
    found_stack_adjust = 1;

  return found_stack_adjust;
}
3279
/* ARM-mode helper for arm_stack_frame_destroyed_p: return non-zero if
   PC appears to lie within an ARM (non-Thumb) function epilogue.  */

static int
arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn;
  int found_return;
  CORE_ADDR func_start, func_end;

  /* Without function bounds we cannot scan; report "not destroyed".  */
  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* We are in the epilogue if the previous instruction was a stack
     adjustment and the next instruction is a possible return (bx, mov
     pc, or pop).  We could have to scan backwards to find the stack
     adjustment, or forwards to find the return, but this is a decent
     approximation.  First scan forwards.  */

  found_return = 0;
  insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
  if (bits (insn, 28, 31) != INST_NV)  /* Skip the never-condition space.  */
    {
      if ((insn & 0x0ffffff0) == 0x012fff10)
	/* BX.  */
	found_return = 1;
      else if ((insn & 0x0ffffff0) == 0x01a0f000)
	/* MOV PC.  */
	found_return = 1;
      else if ((insn & 0x0fff0000) == 0x08bd0000
	       && (insn & 0x0000c000) != 0)
	/* POP (LDMIA), including PC or LR.  */
	found_return = 1;
    }

  if (!found_return)
    return 0;

  /* Scan backwards.  This is just a heuristic, so do not worry about
     false positives from mode changes.  */

  if (pc < func_start + 4)
    return 0;

  insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
  if (arm_instruction_restores_sp (insn))
    return 1;

  return 0;
}
3328
3329 /* Implement the stack_frame_destroyed_p gdbarch method. */
3330
3331 static int
3332 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3333 {
3334 if (arm_pc_is_thumb (gdbarch, pc))
3335 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3336 else
3337 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3338 }
3339
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.  */

struct stack_item
{
  int len;			/* Number of bytes in DATA.  */
  struct stack_item *prev;	/* Next-older item; NULL at the bottom.  */
  gdb_byte *data;		/* Heap-allocated copy of the bytes.  */
};
3349
3350 static struct stack_item *
3351 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3352 {
3353 struct stack_item *si;
3354 si = XNEW (struct stack_item);
3355 si->data = (gdb_byte *) xmalloc (len);
3356 si->len = len;
3357 si->prev = prev;
3358 memcpy (si->data, contents, len);
3359 return si;
3360 }
3361
3362 static struct stack_item *
3363 pop_stack_item (struct stack_item *si)
3364 {
3365 struct stack_item *dead = si;
3366 si = si->prev;
3367 xfree (dead->data);
3368 xfree (dead);
3369 return si;
3370 }
3371
3372
/* Return the alignment (in bytes) of the given type.  */

static int
arm_type_align (struct type *t)
{
  int n;
  int align;
  int falign;

  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    default:
      /* Should never happen.  */
      internal_error (__FILE__, __LINE__, _("unknown type alignment"));
      return 4;

    case TYPE_CODE_PTR:
    case TYPE_CODE_ENUM:
    case TYPE_CODE_INT:
    case TYPE_CODE_FLT:
    case TYPE_CODE_SET:
    case TYPE_CODE_RANGE:
    case TYPE_CODE_REF:
    case TYPE_CODE_CHAR:
    case TYPE_CODE_BOOL:
      /* Scalar types align to their own size.  */
      return TYPE_LENGTH (t);

    case TYPE_CODE_ARRAY:
      if (TYPE_VECTOR (t))
	{
	  /* Use the natural alignment for vector types (the same for
	     scalar type), but the maximum alignment is 64-bit.  */
	  if (TYPE_LENGTH (t) > 8)
	    return 8;
	  else
	    return TYPE_LENGTH (t);
	}
      else
	/* Ordinary arrays align like their element type.  */
	return arm_type_align (TYPE_TARGET_TYPE (t));
    case TYPE_CODE_COMPLEX:
      /* A complex number aligns like its component type.  */
      return arm_type_align (TYPE_TARGET_TYPE (t));

    case TYPE_CODE_STRUCT:
    case TYPE_CODE_UNION:
      /* Aggregates align to their most strictly aligned member.  */
      align = 1;
      for (n = 0; n < TYPE_NFIELDS (t); n++)
	{
	  falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
	  if (falign > align)
	    align = falign;
	}
      return align;
    }
}
3428
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,	/* No element has been classified yet.  */
  VFP_CPRC_SINGLE,	/* 32-bit float; passed in 's' registers.  */
  VFP_CPRC_DOUBLE,	/* 64-bit float; passed in 'd' registers.  */
  VFP_CPRC_VEC64,	/* 64-bit vector; passed in 'd' registers.  */
  VFP_CPRC_VEC128	/* 128-bit vector; passed in 'q' registers.  */
};
3440
3441 /* The length of one element of base type B. */
3442
3443 static unsigned
3444 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3445 {
3446 switch (b)
3447 {
3448 case VFP_CPRC_SINGLE:
3449 return 4;
3450 case VFP_CPRC_DOUBLE:
3451 return 8;
3452 case VFP_CPRC_VEC64:
3453 return 8;
3454 case VFP_CPRC_VEC128:
3455 return 16;
3456 default:
3457 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3458 (int) b);
3459 }
3460 }
3461
3462 /* The character ('s', 'd' or 'q') for the type of VFP register used
3463 for passing base type B. */
3464
3465 static int
3466 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3467 {
3468 switch (b)
3469 {
3470 case VFP_CPRC_SINGLE:
3471 return 's';
3472 case VFP_CPRC_DOUBLE:
3473 return 'd';
3474 case VFP_CPRC_VEC64:
3475 return 'd';
3476 case VFP_CPRC_VEC128:
3477 return 'q';
3478 default:
3479 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3480 (int) b);
3481 }
3482 }
3483
/* Determine whether T may be part of a candidate for passing and
   returning in VFP registers, ignoring the limit on the total number
   of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
   classification of the first valid component found; if it is not
   VFP_CPRC_UNKNOWN, all components must have the same classification
   as *BASE_TYPE.  If it is found that T contains a type not permitted
   for passing and returning in VFP registers, a type differently
   classified from *BASE_TYPE, or two types differently classified
   from each other, return -1, otherwise return the total number of
   base-type elements found (possibly 0 in an empty structure or
   array).  Vector types are not currently supported, matching the
   generic AAPCS support.  */

static int
arm_vfp_cprc_sub_candidate (struct type *t,
			    enum arm_vfp_cprc_base_type *base_type)
{
  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    case TYPE_CODE_FLT:
      /* A single float is one element; its size selects the class.  */
      switch (TYPE_LENGTH (t))
	{
	case 4:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 1;

	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 1;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_COMPLEX:
      /* Arguments of complex T where T is one of the types float or
	 double get treated as if they are implemented as:

	 struct complexT
	 {
	   T real;
	   T imag;
	 };

      */
      switch (TYPE_LENGTH (t))
	{
	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 2;

	case 16:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 2;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_ARRAY:
      {
	if (TYPE_VECTOR (t))
	  {
	    /* A 64-bit or 128-bit containerized vector type are VFP
	       CPRCs.  */
	    switch (TYPE_LENGTH (t))
	      {
	      case 8:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC64;
		return 1;
	      case 16:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC128;
		return 1;
	      default:
		return -1;
	      }
	  }
	else
	  {
	    int count;
	    unsigned unitlen;

	    /* An ordinary array contributes its element count times
	       the element's own element count.  */
	    count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
						base_type);
	    if (count == -1)
	      return -1;
	    if (TYPE_LENGTH (t) == 0)
	      {
		gdb_assert (count == 0);
		return 0;
	      }
	    else if (count == 0)
	      return -1;
	    unitlen = arm_vfp_cprc_unit_length (*base_type);
	    gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
	    return TYPE_LENGTH (t) / unitlen;
	  }
      }
      break;

    case TYPE_CODE_STRUCT:
      {
	/* Sum the element counts of all non-static fields; the struct
	   must be exactly the sum of its parts (no padding).  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = 0;

	    if (!field_is_static (&TYPE_FIELD (t, i)))
	      sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
						      base_type);
	    if (sub_count == -1)
	      return -1;
	    count += sub_count;
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    case TYPE_CODE_UNION:
      {
	/* A union contributes the maximum of its members' counts.  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count = (count > sub_count ? count : sub_count);
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    default:
      break;
    }

  return -1;
}
3662
3663 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3664 if passed to or returned from a non-variadic function with the VFP
3665 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3666 *BASE_TYPE to the base type for T and *COUNT to the number of
3667 elements of that base type before returning. */
3668
3669 static int
3670 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3671 int *count)
3672 {
3673 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3674 int c = arm_vfp_cprc_sub_candidate (t, &b);
3675 if (c <= 0 || c > 4)
3676 return 0;
3677 *base_type = b;
3678 *count = c;
3679 return 1;
3680 }
3681
3682 /* Return 1 if the VFP ABI should be used for passing arguments to and
3683 returning values from a function of type FUNC_TYPE, 0
3684 otherwise. */
3685
3686 static int
3687 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3688 {
3689 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3690 /* Variadic functions always use the base ABI. Assume that functions
3691 without debug info are not variadic. */
3692 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3693 return 0;
3694 /* The VFP ABI is only supported as a variant of AAPCS. */
3695 if (tdep->arm_abi != ARM_ABI_AAPCS)
3696 return 0;
3697 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3698 }
3699
3700 /* We currently only support passing parameters in integer registers, which
3701 conforms with GCC's default model, and VFP argument passing following
3702 the VFP variant of AAPCS. Several other variants exist and
3703 we should probably support some of them based on the selected ABI. */
3704
3705 static CORE_ADDR
3706 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3707 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3708 struct value **args, CORE_ADDR sp, int struct_return,
3709 CORE_ADDR struct_addr)
3710 {
3711 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3712 int argnum;
3713 int argreg;
3714 int nstack;
3715 struct stack_item *si = NULL;
3716 int use_vfp_abi;
3717 struct type *ftype;
3718 unsigned vfp_regs_free = (1 << 16) - 1;
3719
3720 /* Determine the type of this function and whether the VFP ABI
3721 applies. */
3722 ftype = check_typedef (value_type (function));
3723 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3724 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3725 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3726
3727 /* Set the return address. For the ARM, the return breakpoint is
3728 always at BP_ADDR. */
3729 if (arm_pc_is_thumb (gdbarch, bp_addr))
3730 bp_addr |= 1;
3731 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3732
3733 /* Walk through the list of args and determine how large a temporary
3734 stack is required. Need to take care here as structs may be
3735 passed on the stack, and we have to push them. */
3736 nstack = 0;
3737
3738 argreg = ARM_A1_REGNUM;
3739 nstack = 0;
3740
3741 /* The struct_return pointer occupies the first parameter
3742 passing register. */
3743 if (struct_return)
3744 {
3745 if (arm_debug)
3746 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3747 gdbarch_register_name (gdbarch, argreg),
3748 paddress (gdbarch, struct_addr));
3749 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3750 argreg++;
3751 }
3752
3753 for (argnum = 0; argnum < nargs; argnum++)
3754 {
3755 int len;
3756 struct type *arg_type;
3757 struct type *target_type;
3758 enum type_code typecode;
3759 const bfd_byte *val;
3760 int align;
3761 enum arm_vfp_cprc_base_type vfp_base_type;
3762 int vfp_base_count;
3763 int may_use_core_reg = 1;
3764
3765 arg_type = check_typedef (value_type (args[argnum]));
3766 len = TYPE_LENGTH (arg_type);
3767 target_type = TYPE_TARGET_TYPE (arg_type);
3768 typecode = TYPE_CODE (arg_type);
3769 val = value_contents (args[argnum]);
3770
3771 align = arm_type_align (arg_type);
3772 /* Round alignment up to a whole number of words. */
3773 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3774 /* Different ABIs have different maximum alignments. */
3775 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3776 {
3777 /* The APCS ABI only requires word alignment. */
3778 align = INT_REGISTER_SIZE;
3779 }
3780 else
3781 {
3782 /* The AAPCS requires at most doubleword alignment. */
3783 if (align > INT_REGISTER_SIZE * 2)
3784 align = INT_REGISTER_SIZE * 2;
3785 }
3786
3787 if (use_vfp_abi
3788 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3789 &vfp_base_count))
3790 {
3791 int regno;
3792 int unit_length;
3793 int shift;
3794 unsigned mask;
3795
3796 /* Because this is a CPRC it cannot go in a core register or
3797 cause a core register to be skipped for alignment.
3798 Either it goes in VFP registers and the rest of this loop
3799 iteration is skipped for this argument, or it goes on the
3800 stack (and the stack alignment code is correct for this
3801 case). */
3802 may_use_core_reg = 0;
3803
3804 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3805 shift = unit_length / 4;
3806 mask = (1 << (shift * vfp_base_count)) - 1;
3807 for (regno = 0; regno < 16; regno += shift)
3808 if (((vfp_regs_free >> regno) & mask) == mask)
3809 break;
3810
3811 if (regno < 16)
3812 {
3813 int reg_char;
3814 int reg_scaled;
3815 int i;
3816
3817 vfp_regs_free &= ~(mask << regno);
3818 reg_scaled = regno / shift;
3819 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3820 for (i = 0; i < vfp_base_count; i++)
3821 {
3822 char name_buf[4];
3823 int regnum;
3824 if (reg_char == 'q')
3825 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3826 val + i * unit_length);
3827 else
3828 {
3829 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3830 reg_char, reg_scaled + i);
3831 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3832 strlen (name_buf));
3833 regcache_cooked_write (regcache, regnum,
3834 val + i * unit_length);
3835 }
3836 }
3837 continue;
3838 }
3839 else
3840 {
3841 /* This CPRC could not go in VFP registers, so all VFP
3842 registers are now marked as used. */
3843 vfp_regs_free = 0;
3844 }
3845 }
3846
3847 /* Push stack padding for dowubleword alignment. */
3848 if (nstack & (align - 1))
3849 {
3850 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3851 nstack += INT_REGISTER_SIZE;
3852 }
3853
3854 /* Doubleword aligned quantities must go in even register pairs. */
3855 if (may_use_core_reg
3856 && argreg <= ARM_LAST_ARG_REGNUM
3857 && align > INT_REGISTER_SIZE
3858 && argreg & 1)
3859 argreg++;
3860
3861 /* If the argument is a pointer to a function, and it is a
3862 Thumb function, create a LOCAL copy of the value and set
3863 the THUMB bit in it. */
3864 if (TYPE_CODE_PTR == typecode
3865 && target_type != NULL
3866 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3867 {
3868 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3869 if (arm_pc_is_thumb (gdbarch, regval))
3870 {
3871 bfd_byte *copy = (bfd_byte *) alloca (len);
3872 store_unsigned_integer (copy, len, byte_order,
3873 MAKE_THUMB_ADDR (regval));
3874 val = copy;
3875 }
3876 }
3877
3878 /* Copy the argument to general registers or the stack in
3879 register-sized pieces. Large arguments are split between
3880 registers and stack. */
3881 while (len > 0)
3882 {
3883 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3884 CORE_ADDR regval
3885 = extract_unsigned_integer (val, partial_len, byte_order);
3886
3887 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3888 {
3889 /* The argument is being passed in a general purpose
3890 register. */
3891 if (byte_order == BFD_ENDIAN_BIG)
3892 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3893 if (arm_debug)
3894 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3895 argnum,
3896 gdbarch_register_name
3897 (gdbarch, argreg),
3898 phex (regval, INT_REGISTER_SIZE));
3899 regcache_cooked_write_unsigned (regcache, argreg, regval);
3900 argreg++;
3901 }
3902 else
3903 {
3904 gdb_byte buf[INT_REGISTER_SIZE];
3905
3906 memset (buf, 0, sizeof (buf));
3907 store_unsigned_integer (buf, partial_len, byte_order, regval);
3908
3909 /* Push the arguments onto the stack. */
3910 if (arm_debug)
3911 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3912 argnum, nstack);
3913 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3914 nstack += INT_REGISTER_SIZE;
3915 }
3916
3917 len -= partial_len;
3918 val += partial_len;
3919 }
3920 }
3921 /* If we have an odd number of words to push, then decrement the stack
3922 by one word now, so first stack argument will be dword aligned. */
3923 if (nstack & 4)
3924 sp -= 4;
3925
3926 while (si)
3927 {
3928 sp -= si->len;
3929 write_memory (sp, si->data, si->len);
3930 si = pop_stack_item (si);
3931 }
3932
3933 /* Finally, update teh SP register. */
3934 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3935
3936 return sp;
3937 }
3938
3939
3940 /* Always align the frame to an 8-byte boundary. This is required on
3941 some platforms and harmless on the rest. */
3942
3943 static CORE_ADDR
3944 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3945 {
3946 /* Align the stack to eight bytes. */
3947 return sp & ~ (CORE_ADDR) 7;
3948 }
3949
/* Print the names of the FPA status bits that are set in FLAGS,
   followed by a newline.  Bit 0 is IVO, bit 1 DVZ, bit 2 OFL,
   bit 3 UFL and bit 4 INX.  */

static void
print_fpu_flags (struct ui_file *file, int flags)
{
  static const char *const flag_names[]
    = { "IVO ", "DVZ ", "OFL ", "UFL ", "INX " };
  int bit;

  for (bit = 0; bit < 5; bit++)
    if (flags & (1 << bit))
      fputs_filtered (flag_names[bit], file);
  fputc_filtered ('\n', file);
}
3965
3966 /* Print interesting information about the floating point processor
3967 (if present) or emulator. */
3968 static void
3969 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3970 struct frame_info *frame, const char *args)
3971 {
3972 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3973 int type;
3974
3975 type = (status >> 24) & 127;
3976 if (status & (1 << 31))
3977 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3978 else
3979 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3980 /* i18n: [floating point unit] mask */
3981 fputs_filtered (_("mask: "), file);
3982 print_fpu_flags (file, status >> 16);
3983 /* i18n: [floating point unit] flags */
3984 fputs_filtered (_("flags: "), file);
3985 print_fpu_flags (file, status);
3986 }
3987
3988 /* Construct the ARM extended floating point type. */
3989 static struct type *
3990 arm_ext_type (struct gdbarch *gdbarch)
3991 {
3992 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3993
3994 if (!tdep->arm_ext_type)
3995 tdep->arm_ext_type
3996 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3997 floatformats_arm_ext);
3998
3999 return tdep->arm_ext_type;
4000 }
4001
/* Build (lazily, caching the result in TDEP) a union type giving the
   overlapping views of a 64-bit NEON "d" register: vectors of u8/u16/
   u32/f32 plus scalar u64 and f64.  */

static struct type *
arm_neon_double_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_double_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
			       TYPE_CODE_UNION);
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", elem);
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", elem);

      /* Mark the union itself as a vector so it displays like one.  */
      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_d";
      tdep->neon_double_type = t;
    }

  return tdep->neon_double_type;
}
4033
/* FIXME: The vector types are not correctly ordered on big-endian
   targets.  Just as s0 is the low bits of d0, d0[0] is also the low
   bits of d0 - regardless of what unit size is being held in d0.  So
   the offset of the first uint8 in d0 is 7, but the offset of the
   first float is 4.  This code works as-is for little-endian
   targets.  */

/* Build (lazily, caching the result in TDEP) a union type giving the
   overlapping views of a 128-bit NEON "q" register; the quad-register
   analogue of arm_neon_double_type above.  */

static struct type *
arm_neon_quad_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_quad_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
			       TYPE_CODE_UNION);
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 16));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", init_vector_type (elem, 2));

      /* Mark the union itself as a vector so it displays like one.  */
      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_q";
      tdep->neon_quad_type = t;
    }

  return tdep->neon_quad_type;
}
4072
/* Return the GDB type object for the "standard" data type of data in
   register N.  */

static struct type *
arm_register_type (struct gdbarch *gdbarch, int regnum)
{
  int num_regs = gdbarch_num_regs (gdbarch);

  /* Pseudo registers: 32 single-precision VFP views first, then 16
     NEON quad views.  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && regnum >= num_regs && regnum < num_regs + 32)
    return builtin_type (gdbarch)->builtin_float;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
    return arm_neon_quad_type (gdbarch);

  /* If the target description has register information, we are only
     in this function so that we can override the types of
     double-precision registers for NEON.  */
  if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
    {
      struct type *t = tdesc_register_type (gdbarch, regnum);

      if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
	  && TYPE_CODE (t) == TYPE_CODE_FLT
	  && gdbarch_tdep (gdbarch)->have_neon)
	return arm_neon_double_type (gdbarch);
      else
	return t;
    }

  if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
    {
      /* FPA registers are typeless unless the target has them.  */
      if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
	return builtin_type (gdbarch)->builtin_void;

      return arm_ext_type (gdbarch);
    }
  else if (regnum == ARM_SP_REGNUM)
    return builtin_type (gdbarch)->builtin_data_ptr;
  else if (regnum == ARM_PC_REGNUM)
    return builtin_type (gdbarch)->builtin_func_ptr;
  else if (regnum >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return builtin_type (gdbarch)->builtin_int0;
  else
    return builtin_type (gdbarch)->builtin_uint32;
}
4122
4123 /* Map a DWARF register REGNUM onto the appropriate GDB register
4124 number. */
4125
4126 static int
4127 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4128 {
4129 /* Core integer regs. */
4130 if (reg >= 0 && reg <= 15)
4131 return reg;
4132
4133 /* Legacy FPA encoding. These were once used in a way which
4134 overlapped with VFP register numbering, so their use is
4135 discouraged, but GDB doesn't support the ARM toolchain
4136 which used them for VFP. */
4137 if (reg >= 16 && reg <= 23)
4138 return ARM_F0_REGNUM + reg - 16;
4139
4140 /* New assignments for the FPA registers. */
4141 if (reg >= 96 && reg <= 103)
4142 return ARM_F0_REGNUM + reg - 96;
4143
4144 /* WMMX register assignments. */
4145 if (reg >= 104 && reg <= 111)
4146 return ARM_WCGR0_REGNUM + reg - 104;
4147
4148 if (reg >= 112 && reg <= 127)
4149 return ARM_WR0_REGNUM + reg - 112;
4150
4151 if (reg >= 192 && reg <= 199)
4152 return ARM_WC0_REGNUM + reg - 192;
4153
4154 /* VFP v2 registers. A double precision value is actually
4155 in d1 rather than s2, but the ABI only defines numbering
4156 for the single precision registers. This will "just work"
4157 in GDB for little endian targets (we'll read eight bytes,
4158 starting in s0 and then progressing to s1), but will be
4159 reversed on big endian targets with VFP. This won't
4160 be a problem for the new Neon quad registers; you're supposed
4161 to use DW_OP_piece for those. */
4162 if (reg >= 64 && reg <= 95)
4163 {
4164 char name_buf[4];
4165
4166 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4167 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4168 strlen (name_buf));
4169 }
4170
4171 /* VFP v3 / Neon registers. This range is also used for VFP v2
4172 registers, except that it now describes d0 instead of s0. */
4173 if (reg >= 256 && reg <= 287)
4174 {
4175 char name_buf[4];
4176
4177 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4178 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4179 strlen (name_buf));
4180 }
4181
4182 return -1;
4183 }
4184
4185 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4186 static int
4187 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4188 {
4189 int reg = regnum;
4190 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4191
4192 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4193 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4194
4195 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4196 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4197
4198 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4199 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4200
4201 if (reg < NUM_GREGS)
4202 return SIM_ARM_R0_REGNUM + reg;
4203 reg -= NUM_GREGS;
4204
4205 if (reg < NUM_FREGS)
4206 return SIM_ARM_FP0_REGNUM + reg;
4207 reg -= NUM_FREGS;
4208
4209 if (reg < NUM_SREGS)
4210 return SIM_ARM_FPS_REGNUM + reg;
4211 reg -= NUM_SREGS;
4212
4213 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4214 }
4215
4216 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4217 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
   It is thought that this is the floating-point register format on
4219 little-endian systems. */
4220
4221 static void
4222 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4223 void *dbl, int endianess)
4224 {
4225 DOUBLEST d;
4226
4227 if (endianess == BFD_ENDIAN_BIG)
4228 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4229 else
4230 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4231 ptr, &d);
4232 floatformat_from_doublest (fmt, &d, dbl);
4233 }
4234
4235 static void
4236 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4237 int endianess)
4238 {
4239 DOUBLEST d;
4240
4241 floatformat_to_doublest (fmt, ptr, &d);
4242 if (endianess == BFD_ENDIAN_BIG)
4243 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4244 else
4245 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4246 &d, dbl);
4247 }
4248
/* Like insert_single_step_breakpoint, but make sure we use a breakpoint
   of the appropriate mode (as encoded in the PC value), even if this
   differs from what would be expected according to the symbol tables.  */

void
arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
				   struct address_space *aspace,
				   CORE_ADDR pc)
{
  /* Temporarily force the ARM/Thumb decision to follow the PC's mode
     bit; the cleanup restores the previous value of the override.  */
  struct cleanup *old_chain
    = make_cleanup_restore_integer (&arm_override_mode);

  /* Record the mode implied by PC (IS_THUMB_ADDR presumably tests the
     Thumb bit of the address), then strip the mode bit so we place the
     breakpoint at the real instruction address.  */
  arm_override_mode = IS_THUMB_ADDR (pc);
  pc = gdbarch_addr_bits_remove (gdbarch, pc);

  insert_single_step_breakpoint (gdbarch, aspace, pc);

  do_cleanups (old_chain);
}
4268
4269 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4270 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4271 NULL if an error occurs. BUF is freed. */
4272
4273 static gdb_byte *
4274 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4275 int old_len, int new_len)
4276 {
4277 gdb_byte *new_buf;
4278 int bytes_to_read = new_len - old_len;
4279
4280 new_buf = (gdb_byte *) xmalloc (new_len);
4281 memcpy (new_buf + bytes_to_read, buf, old_len);
4282 xfree (buf);
4283 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
4284 {
4285 xfree (new_buf);
4286 return NULL;
4287 }
4288 return new_buf;
4289 }
4290
4291 /* An IT block is at most the 2-byte IT instruction followed by
4292 four 4-byte instructions. The furthest back we must search to
4293 find an IT block that affects the current instruction is thus
4294 2 + 3 * 4 == 14 bytes. */
4295 #define MAX_IT_BLOCK_PREFIX 14
4296
4297 /* Use a quick scan if there are more than this many bytes of
4298 code. */
4299 #define IT_SCAN_THRESHOLD 32
4300
4301 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4302 A breakpoint in an IT block may not be hit, depending on the
4303 condition flags. */
4304 static CORE_ADDR
4305 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4306 {
4307 gdb_byte *buf;
4308 char map_type;
4309 CORE_ADDR boundary, func_start;
4310 int buf_len;
4311 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4312 int i, any, last_it, last_it_count;
4313
4314 /* If we are using BKPT breakpoints, none of this is necessary. */
4315 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4316 return bpaddr;
4317
4318 /* ARM mode does not have this problem. */
4319 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4320 return bpaddr;
4321
4322 /* We are setting a breakpoint in Thumb code that could potentially
4323 contain an IT block. The first step is to find how much Thumb
4324 code there is; we do not need to read outside of known Thumb
4325 sequences. */
4326 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4327 if (map_type == 0)
4328 /* Thumb-2 code must have mapping symbols to have a chance. */
4329 return bpaddr;
4330
4331 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4332
4333 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4334 && func_start > boundary)
4335 boundary = func_start;
4336
4337 /* Search for a candidate IT instruction. We have to do some fancy
4338 footwork to distinguish a real IT instruction from the second
4339 half of a 32-bit instruction, but there is no need for that if
4340 there's no candidate. */
4341 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4342 if (buf_len == 0)
4343 /* No room for an IT instruction. */
4344 return bpaddr;
4345
4346 buf = (gdb_byte *) xmalloc (buf_len);
4347 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
4348 return bpaddr;
4349 any = 0;
4350 for (i = 0; i < buf_len; i += 2)
4351 {
4352 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4353 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4354 {
4355 any = 1;
4356 break;
4357 }
4358 }
4359
4360 if (any == 0)
4361 {
4362 xfree (buf);
4363 return bpaddr;
4364 }
4365
4366 /* OK, the code bytes before this instruction contain at least one
4367 halfword which resembles an IT instruction. We know that it's
4368 Thumb code, but there are still two possibilities. Either the
4369 halfword really is an IT instruction, or it is the second half of
4370 a 32-bit Thumb instruction. The only way we can tell is to
4371 scan forwards from a known instruction boundary. */
4372 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4373 {
4374 int definite;
4375
4376 /* There's a lot of code before this instruction. Start with an
4377 optimistic search; it's easy to recognize halfwords that can
4378 not be the start of a 32-bit instruction, and use that to
4379 lock on to the instruction boundaries. */
4380 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4381 if (buf == NULL)
4382 return bpaddr;
4383 buf_len = IT_SCAN_THRESHOLD;
4384
4385 definite = 0;
4386 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4387 {
4388 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4389 if (thumb_insn_size (inst1) == 2)
4390 {
4391 definite = 1;
4392 break;
4393 }
4394 }
4395
4396 /* At this point, if DEFINITE, BUF[I] is the first place we
4397 are sure that we know the instruction boundaries, and it is far
4398 enough from BPADDR that we could not miss an IT instruction
4399 affecting BPADDR. If ! DEFINITE, give up - start from a
4400 known boundary. */
4401 if (! definite)
4402 {
4403 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4404 bpaddr - boundary);
4405 if (buf == NULL)
4406 return bpaddr;
4407 buf_len = bpaddr - boundary;
4408 i = 0;
4409 }
4410 }
4411 else
4412 {
4413 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4414 if (buf == NULL)
4415 return bpaddr;
4416 buf_len = bpaddr - boundary;
4417 i = 0;
4418 }
4419
4420 /* Scan forwards. Find the last IT instruction before BPADDR. */
4421 last_it = -1;
4422 last_it_count = 0;
4423 while (i < buf_len)
4424 {
4425 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4426 last_it_count--;
4427 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4428 {
4429 last_it = i;
4430 if (inst1 & 0x0001)
4431 last_it_count = 4;
4432 else if (inst1 & 0x0002)
4433 last_it_count = 3;
4434 else if (inst1 & 0x0004)
4435 last_it_count = 2;
4436 else
4437 last_it_count = 1;
4438 }
4439 i += thumb_insn_size (inst1);
4440 }
4441
4442 xfree (buf);
4443
4444 if (last_it == -1)
4445 /* There wasn't really an IT instruction after all. */
4446 return bpaddr;
4447
4448 if (last_it_count < 1)
4449 /* It was too far away. */
4450 return bpaddr;
4451
4452 /* This really is a trouble spot. Move the breakpoint to the IT
4453 instruction. */
4454 return bpaddr - buf_len + last_it;
4455 }
4456
4457 /* ARM displaced stepping support.
4458
4459 Generally ARM displaced stepping works as follows:
4460
4461 1. When an instruction is to be single-stepped, it is first decoded by
4462 arm_process_displaced_insn. Depending on the type of instruction, it is
4463 then copied to a scratch location, possibly in a modified form. The
4464 copy_* set of functions performs such modification, as necessary. A
4465 breakpoint is placed after the modified instruction in the scratch space
4466 to return control to GDB. Note in particular that instructions which
4467 modify the PC will no longer do so after modification.
4468
4469 2. The instruction is single-stepped, by setting the PC to the scratch
4470 location address, and resuming. Control returns to GDB when the
4471 breakpoint is hit.
4472
4473 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4474 function used for the current instruction. This function's job is to
4475 put the CPU/memory state back to what it would have been if the
4476 instruction had been executed unmodified in its original location. */
4477
4478 /* NOP instruction (mov r0, r0). */
4479 #define ARM_NOP 0xe1a00000
4480 #define THUMB_NOP 0x4600
4481
4482 /* Helper for register reads for displaced stepping. In particular, this
4483 returns the PC as it would be seen by the instruction at its original
4484 location. */
4485
4486 ULONGEST
4487 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4488 int regno)
4489 {
4490 ULONGEST ret;
4491 CORE_ADDR from = dsc->insn_addr;
4492
4493 if (regno == ARM_PC_REGNUM)
4494 {
4495 /* Compute pipeline offset:
4496 - When executing an ARM instruction, PC reads as the address of the
4497 current instruction plus 8.
4498 - When executing a Thumb instruction, PC reads as the address of the
4499 current instruction plus 4. */
4500
4501 if (!dsc->is_thumb)
4502 from += 8;
4503 else
4504 from += 4;
4505
4506 if (debug_displaced)
4507 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4508 (unsigned long) from);
4509 return (ULONGEST) from;
4510 }
4511 else
4512 {
4513 regcache_cooked_read_unsigned (regs, regno, &ret);
4514 if (debug_displaced)
4515 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4516 regno, (unsigned long) ret);
4517 return ret;
4518 }
4519 }
4520
4521 static int
4522 displaced_in_arm_mode (struct regcache *regs)
4523 {
4524 ULONGEST ps;
4525 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4526
4527 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4528
4529 return (ps & t_bit) == 0;
4530 }
4531
4532 /* Write to the PC as from a branch instruction. */
4533
4534 static void
4535 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4536 ULONGEST val)
4537 {
4538 if (!dsc->is_thumb)
4539 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4540 architecture versions < 6. */
4541 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4542 val & ~(ULONGEST) 0x3);
4543 else
4544 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4545 val & ~(ULONGEST) 0x1);
4546 }
4547
/* Write to the PC as from a branch-exchange instruction.  Bit 0 of VAL
   selects the new instruction set state, as for BX.  */

static void
bx_write_pc (struct regcache *regs, ULONGEST val)
{
  ULONGEST ps;
  ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));

  /* Read the current CPSR so we can update its Thumb (T) bit.  */
  regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);

  if ((val & 1) == 1)
    {
      /* Bit 0 set: switch to Thumb state, clearing the bit from the
	 PC value.  */
      regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
      regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
    }
  else if ((val & 2) == 0)
    {
      /* Word-aligned destination: switch to ARM state.  */
      regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
      regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
    }
  else
    {
      /* Unpredictable behaviour.  Try to do something sensible (switch to ARM
	 mode, align dest to 4 bytes).  */
      warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
      regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
      regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
    }
}
4577
4578 /* Write to the PC as if from a load instruction. */
4579
4580 static void
4581 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4582 ULONGEST val)
4583 {
4584 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4585 bx_write_pc (regs, val);
4586 else
4587 branch_write_pc (regs, dsc, val);
4588 }
4589
4590 /* Write to the PC as if from an ALU instruction. */
4591
4592 static void
4593 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4594 ULONGEST val)
4595 {
4596 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4597 bx_write_pc (regs, val);
4598 else
4599 branch_write_pc (regs, dsc, val);
4600 }
4601
4602 /* Helper for writing to registers for displaced stepping. Writing to the PC
4603 has a varying effects depending on the instruction which does the write:
4604 this is controlled by the WRITE_PC argument. */
4605
4606 void
4607 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4608 int regno, ULONGEST val, enum pc_write_style write_pc)
4609 {
4610 if (regno == ARM_PC_REGNUM)
4611 {
4612 if (debug_displaced)
4613 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4614 (unsigned long) val);
4615 switch (write_pc)
4616 {
4617 case BRANCH_WRITE_PC:
4618 branch_write_pc (regs, dsc, val);
4619 break;
4620
4621 case BX_WRITE_PC:
4622 bx_write_pc (regs, val);
4623 break;
4624
4625 case LOAD_WRITE_PC:
4626 load_write_pc (regs, dsc, val);
4627 break;
4628
4629 case ALU_WRITE_PC:
4630 alu_write_pc (regs, dsc, val);
4631 break;
4632
4633 case CANNOT_WRITE_PC:
4634 warning (_("Instruction wrote to PC in an unexpected way when "
4635 "single-stepping"));
4636 break;
4637
4638 default:
4639 internal_error (__FILE__, __LINE__,
4640 _("Invalid argument to displaced_write_reg"));
4641 }
4642
4643 dsc->wrote_to_pc = 1;
4644 }
4645 else
4646 {
4647 if (debug_displaced)
4648 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4649 regno, (unsigned long) val);
4650 regcache_cooked_write_unsigned (regs, regno, val);
4651 }
4652 }
4653
/* This function is used to concisely determine if an instruction INSN
   references PC.  Register fields of interest in INSN should have the
   corresponding fields of BITMASK set to 0b1111.  The function returns
   1 if any of these fields in INSN reference the PC (also 0b1111,
   r15), else it returns 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t remaining = bitmask;
  uint32_t probe = 1;

  while (remaining != 0)
    {
      uint32_t field;

      /* Advance PROBE to the lowest bit still set in REMAINING.  */
      for (; probe && (remaining & probe) == 0; probe <<= 1)
	;

      if (!probe)
	break;

      /* FIELD covers the four bits of the register field starting at
	 PROBE; the field names the PC when all four bits are set.  */
      field = probe * 0xf;

      if ((insn & field) == field)
	return 1;

      remaining &= ~field;
    }

  return 0;
}
4685
/* The simplest copy function.  Many instructions have the same effect no
   matter what address they are executed at: in those cases, use this.
   INSN is the 32-bit ARM instruction, INAME a short name for the debug
   trace.  Always succeeds (returns 0).  */

static int
arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
		     const char *iname, struct displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
			"opcode/class '%s' unmodified\n", (unsigned long) insn,
			iname);

  /* Execute the instruction as-is in the scratch area; no cleanup
     callback is installed.  */
  dsc->modinsn[0] = insn;

  return 0;
}
4702
/* Copy a 32-bit Thumb-2 instruction (INSN1 is the first halfword,
   INSN2 the second) without any modification.  Always succeeds
   (returns 0).  */

static int
thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, const char *iname,
			     struct displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
			"opcode/class '%s' unmodified\n", insn1, insn2,
			iname);

  /* Both halfwords go into the scratch area unchanged.  */
  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = insn2;
  dsc->numinsns = 2;

  return 0;
}
4719
/* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without
   any modification.  Always succeeds (returns 0).  */
static int
thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
			     const char *iname,
			     struct displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
			"opcode/class '%s' unmodified\n", insn,
			iname);

  /* Execute the halfword as-is in the scratch area.  */
  dsc->modinsn[0] = insn;

  return 0;
}
4736
/* Preload instructions with immediate offset.  */

/* Cleanup for the preload copy functions: restore the scratch
   registers.  r0 is always restored; r1 only for the forms that used
   it (register-offset and PC-relative Thumb-2 preloads, which clear
   u.preload.immed).  */
static void
cleanup_preload (struct gdbarch *gdbarch,
		 struct regcache *regs, struct displaced_step_closure *dsc)
{
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (!dsc->u.preload.immed)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
}
4747
4748 static void
4749 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4750 struct displaced_step_closure *dsc, unsigned int rn)
4751 {
4752 ULONGEST rn_val;
4753 /* Preload instructions:
4754
4755 {pli/pld} [rn, #+/-imm]
4756 ->
4757 {pli/pld} [r0, #+/-imm]. */
4758
4759 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4760 rn_val = displaced_read_reg (regs, dsc, rn);
4761 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4762 dsc->u.preload.immed = 1;
4763
4764 dsc->cleanup = &cleanup_preload;
4765 }
4766
/* Copy an ARM PLD/PLI (immediate) instruction for displaced stepping.  */

static int
arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
		  struct displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);

  /* Only instructions whose base register (Rn, bits 16-19) is the PC
     need rewriting.  */
  if (!insn_references_pc (insn, 0x000f0000ul))
    return arm_copy_unmodified (gdbarch, insn, "preload", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
			(unsigned long) insn);

  /* Clear the Rn field, i.e. use r0 as the base register instead.  */
  dsc->modinsn[0] = insn & 0xfff0ffff;

  install_preload (gdbarch, regs, dsc, rn);

  return 0;
}
4786
/* Copy a Thumb-2 PLD/PLI (literal) instruction for displaced stepping.  */

static int
thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
		     struct regcache *regs, struct displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int u_bit = bit (insn1, 7);	/* 1: add IMM12, 0: subtract it.  */
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  /* Only the PC-relative (literal) forms need rewriting.  */
  if (rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);

  /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
     PLD (literal) Encoding T1.  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
			imm12);

  /* Fold the U bit into the sign of the offset.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction {pli/pld} PC imm12 into:
     Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12

     {pli/pld} [r0, r1]

     Cleanup: r0 <- tmp[0], r1 <- tmp[1].  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);

  /* displaced_read_reg yields the pipeline PC (insn address + 4).  */
  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
  dsc->u.preload.immed = 0;

  /* {pli/pld} [r0, r1] */
  dsc->modinsn[0] = insn1 & 0xfff0;
  dsc->modinsn[1] = 0xf001;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_preload;
  return 0;
}
4834
4835 /* Preload instructions with register offset. */
4836
4837 static void
4838 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4839 struct displaced_step_closure *dsc, unsigned int rn,
4840 unsigned int rm)
4841 {
4842 ULONGEST rn_val, rm_val;
4843
4844 /* Preload register-offset instructions:
4845
4846 {pli/pld} [rn, rm {, shift}]
4847 ->
4848 {pli/pld} [r0, r1 {, shift}]. */
4849
4850 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4851 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4852 rn_val = displaced_read_reg (regs, dsc, rn);
4853 rm_val = displaced_read_reg (regs, dsc, rm);
4854 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4855 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4856 dsc->u.preload.immed = 0;
4857
4858 dsc->cleanup = &cleanup_preload;
4859 }
4860
/* Copy an ARM PLD/PLI (register) instruction for displaced stepping.  */

static int
arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
		      struct regcache *regs,
		      struct displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);

  /* Only instructions where Rn (bits 16-19) or Rm (bits 0-3) is the
     PC need rewriting.  */
  if (!insn_references_pc (insn, 0x000f000ful))
    return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
			(unsigned long) insn);

  /* Clear Rn and set Rm to 1: {pli/pld} [r0, r1 {, shift}].  */
  dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;

  install_preload_reg (gdbarch, regs, dsc, rn, rm);
  return 0;
}
4882
/* Copy/cleanup coprocessor load and store instructions.  */

static void
cleanup_copro_load_store (struct gdbarch *gdbarch,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  /* r0 was substituted for the base register and may have been
     written back by the instruction; capture its final value.  */
  ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);

  /* Restore the original contents of the scratch register r0.  */
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);

  /* If writeback was requested, propagate the updated base address to
     the real base register.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
}
4897
4898 static void
4899 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4900 struct displaced_step_closure *dsc,
4901 int writeback, unsigned int rn)
4902 {
4903 ULONGEST rn_val;
4904
4905 /* Coprocessor load/store instructions:
4906
4907 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4908 ->
4909 {stc/stc2} [r0, #+/-imm].
4910
4911 ldc/ldc2 are handled identically. */
4912
4913 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4914 rn_val = displaced_read_reg (regs, dsc, rn);
4915 /* PC should be 4-byte aligned. */
4916 rn_val = rn_val & 0xfffffffc;
4917 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4918
4919 dsc->u.ldst.writeback = writeback;
4920 dsc->u.ldst.rn = rn;
4921
4922 dsc->cleanup = &cleanup_copro_load_store;
4923 }
4924
/* Copy an ARM coprocessor load/store (LDC/STC and friends) for
   displaced stepping.  */

static int
arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
			   struct regcache *regs,
			   struct displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);

  /* Only instructions using the PC as the base register (Rn) need
     rewriting.  */
  if (!insn_references_pc (insn, 0x000f0000ul))
    return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
			"load/store insn %.8lx\n", (unsigned long) insn);

  /* Clear the Rn field: use r0 as the base register instead.  */
  dsc->modinsn[0] = insn & 0xfff0ffff;

  /* NOTE(review): bit 25 is passed as the writeback flag here; the
     LDC/STC W bit is usually documented as bit 21 — confirm against
     the ARM ARM encoding for the instructions routed here.  */
  install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);

  return 0;
}
4945
/* Copy a Thumb-2 coprocessor load (LDC/LDC2/VLDR literal) for
   displaced stepping.  */

static int
thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn1, 0, 3);

  /* Only the PC-relative (literal) forms need rewriting.  */
  if (rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					"copro load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
			"load/store insn %.4x%.4x\n", insn1, insn2);

  /* Clear the Rn field: use r0 (loaded with the aligned PC by
     install_copro_load_store) as the base register.  */
  dsc->modinsn[0] = insn1 & 0xfff0;
  dsc->modinsn[1] = insn2;
  dsc->numinsns = 2;

  /* This function is called for copying instruction LDC/LDC2/VLDR, which
     doesn't support writeback, so pass 0.  */
  install_copro_load_store (gdbarch, regs, dsc, 0, rn);

  return 0;
}
4971
4972 /* Clean up branch instructions (actually perform the branch, by setting
4973 PC). */
4974
4975 static void
4976 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4977 struct displaced_step_closure *dsc)
4978 {
4979 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4980 int branch_taken = condition_true (dsc->u.branch.cond, status);
4981 enum pc_write_style write_pc = dsc->u.branch.exchange
4982 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4983
4984 if (!branch_taken)
4985 return;
4986
4987 if (dsc->u.branch.link)
4988 {
4989 /* The value of LR should be the next insn of current one. In order
4990 not to confuse logic hanlding later insn `bx lr', if current insn mode
4991 is Thumb, the bit 0 of LR value should be set to 1. */
4992 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4993
4994 if (dsc->is_thumb)
4995 next_insn_addr |= 0x1;
4996
4997 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4998 CANNOT_WRITE_PC);
4999 }
5000
5001 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5002 }
5003
5004 /* Copy B/BL/BLX instructions with immediate destinations. */
5005
5006 static void
5007 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5008 struct displaced_step_closure *dsc,
5009 unsigned int cond, int exchange, int link, long offset)
5010 {
5011 /* Implement "BL<cond> <label>" as:
5012
5013 Preparation: cond <- instruction condition
5014 Insn: mov r0, r0 (nop)
5015 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5016
5017 B<cond> similar, but don't set r14 in cleanup. */
5018
5019 dsc->u.branch.cond = cond;
5020 dsc->u.branch.link = link;
5021 dsc->u.branch.exchange = exchange;
5022
5023 dsc->u.branch.dest = dsc->insn_addr;
5024 if (link && exchange)
5025 /* For BLX, offset is computed from the Align (PC, 4). */
5026 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5027
5028 if (dsc->is_thumb)
5029 dsc->u.branch.dest += 4 + offset;
5030 else
5031 dsc->u.branch.dest += 8 + offset;
5032
5033 dsc->cleanup = &cleanup_branch;
5034 }
/* Copy an ARM B/BL/BLX (immediate) instruction for displaced
   stepping.  */
static int
arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
		   struct regcache *regs, struct displaced_step_closure *dsc)
{
  unsigned int cond = bits (insn, 28, 31);
  /* The 0xf "condition" value denotes BLX (immediate).  */
  int exchange = (cond == 0xf);
  int link = exchange || bit (insn, 24);
  long offset;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
			"%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
			(unsigned long) insn);
  if (exchange)
    /* For BLX, set bit 0 of the destination.  The cleanup_branch function will
       then arrange the switch into Thumb mode.  */
    offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
  else
    offset = bits (insn, 0, 23) << 2;

  /* Sign-extend the assembled 26-bit offset (bit 25 is the sign).  */
  if (bit (offset, 25))
    offset = offset | ~0x3ffffff;

  /* Execute a NOP in the scratch space; cleanup_branch performs the
     actual (conditional) branch from the original location.  */
  dsc->modinsn[0] = ARM_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
5063
/* Copy a 32-bit Thumb B/BL/BLX instruction for displaced stepping.  */

static int
thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
		      uint16_t insn2, struct regcache *regs,
		      struct displaced_step_closure *dsc)
{
  int link = bit (insn2, 14);		/* Set for BL/BLX.  */
  int exchange = link && !bit (insn2, 12);	/* BLX when bit 12 clear.  */
  int cond = INST_AL;
  long offset = 0;
  int j1 = bit (insn2, 13);
  int j2 = bit (insn2, 11);
  int s = sbits (insn1, 10, 10);	/* Sign bit (sbits sign-extends).  */
  int i1 = !(j1 ^ bit (insn1, 10));	/* I1 = NOT (J1 XOR S).  */
  int i2 = !(j2 ^ bit (insn1, 10));	/* I2 = NOT (J2 XOR S).  */

  if (!link && !exchange) /* B */
    {
      offset = (bits (insn2, 0, 10) << 1);
      if (bit (insn2, 12)) /* Encoding T4 */
	{
	  offset |= (bits (insn1, 0, 9) << 12)
	    | (i2 << 22)
	    | (i1 << 23)
	    | (s << 24);
	  cond = INST_AL;
	}
      else /* Encoding T3 */
	{
	  offset |= (bits (insn1, 0, 5) << 12)
	    | (j1 << 18)
	    | (j2 << 19)
	    | (s << 20);
	  /* T3 carries an explicit condition in bits 6-9.  */
	  cond = bits (insn1, 6, 9);
	}
    }
  else
    {
      /* BL/BLX: for BLX the lowest halfword bit is dropped and the
	 offset is a multiple of 4.  */
      offset = (bits (insn1, 0, 9) << 12);
      offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
      offset |= exchange ?
	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
			"%.4x %.4x with offset %.8lx\n",
			link ? (exchange) ? "blx" : "bl" : "b",
			insn1, insn2, offset);

  /* Execute a NOP; cleanup_branch performs the actual branch from the
     original location.  */
  dsc->modinsn[0] = THUMB_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
5118
/* Copy B Thumb instructions (16-bit encodings T1 and T2) for displaced
   stepping.  The branch is replaced by a NOP; cleanup_branch performs
   the transfer using the decoded condition and destination.  Always
   returns 0.  */
static int
thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
	      struct displaced_step_closure *dsc)
{
  unsigned int cond = 0;
  int offset = 0;
  unsigned short bit_12_15 = bits (insn, 12, 15);
  CORE_ADDR from = dsc->insn_addr;

  if (bit_12_15 == 0xd)		/* Encoding T1: conditional branch.  */
    {
      /* offset = SignExtend (imm8:0, 32) */
      offset = sbits ((insn << 1), 0, 8);
      cond = bits (insn, 8, 11);
    }
  else if (bit_12_15 == 0xe) /* Encoding T2 */
    {
      /* offset = SignExtend (imm11:0, 32); unconditional.  */
      offset = sbits ((insn << 1), 0, 11);
      cond = INST_AL;
    }
  /* NOTE(review): any other BIT_12_15 value falls through with
     cond == 0 and offset == 0; presumably callers only dispatch T1/T2
     encodings here — confirm against the Thumb decode table.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying b immediate insn %.4x "
			"with offset %d\n", insn, offset);

  dsc->u.branch.cond = cond;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  /* The branch offset is relative to the PC value, which is the
     instruction address + 4 in Thumb state.  */
  dsc->u.branch.dest = from + 4 + offset;

  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
5157
5158 /* Copy BX/BLX with register-specified destinations. */
5159
5160 static void
5161 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5162 struct displaced_step_closure *dsc, int link,
5163 unsigned int cond, unsigned int rm)
5164 {
5165 /* Implement {BX,BLX}<cond> <reg>" as:
5166
5167 Preparation: cond <- instruction condition
5168 Insn: mov r0, r0 (nop)
5169 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5170
5171 Don't set r14 in cleanup for BX. */
5172
5173 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5174
5175 dsc->u.branch.cond = cond;
5176 dsc->u.branch.link = link;
5177
5178 dsc->u.branch.exchange = 1;
5179
5180 dsc->cleanup = &cleanup_branch;
5181 }
5182
5183 static int
5184 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5185 struct regcache *regs, struct displaced_step_closure *dsc)
5186 {
5187 unsigned int cond = bits (insn, 28, 31);
5188 /* BX: x12xxx1x
5189 BLX: x12xxx3x. */
5190 int link = bit (insn, 5);
5191 unsigned int rm = bits (insn, 0, 3);
5192
5193 if (debug_displaced)
5194 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5195 (unsigned long) insn);
5196
5197 dsc->modinsn[0] = ARM_NOP;
5198
5199 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5200 return 0;
5201 }
5202
5203 static int
5204 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5205 struct regcache *regs,
5206 struct displaced_step_closure *dsc)
5207 {
5208 int link = bit (insn, 7);
5209 unsigned int rm = bits (insn, 3, 6);
5210
5211 if (debug_displaced)
5212 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5213 (unsigned short) insn);
5214
5215 dsc->modinsn[0] = THUMB_NOP;
5216
5217 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5218
5219 return 0;
5220 }
5221
5222
5223 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5224
5225 static void
5226 cleanup_alu_imm (struct gdbarch *gdbarch,
5227 struct regcache *regs, struct displaced_step_closure *dsc)
5228 {
5229 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5230 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5231 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5232 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5233 }
5234
5235 static int
5236 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5237 struct displaced_step_closure *dsc)
5238 {
5239 unsigned int rn = bits (insn, 16, 19);
5240 unsigned int rd = bits (insn, 12, 15);
5241 unsigned int op = bits (insn, 21, 24);
5242 int is_mov = (op == 0xd);
5243 ULONGEST rd_val, rn_val;
5244
5245 if (!insn_references_pc (insn, 0x000ff000ul))
5246 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5247
5248 if (debug_displaced)
5249 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5250 "%.8lx\n", is_mov ? "move" : "ALU",
5251 (unsigned long) insn);
5252
5253 /* Instruction is of form:
5254
5255 <op><cond> rd, [rn,] #imm
5256
5257 Rewrite as:
5258
5259 Preparation: tmp1, tmp2 <- r0, r1;
5260 r0, r1 <- rd, rn
5261 Insn: <op><cond> r0, r1, #imm
5262 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5263 */
5264
5265 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5266 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5267 rn_val = displaced_read_reg (regs, dsc, rn);
5268 rd_val = displaced_read_reg (regs, dsc, rd);
5269 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5270 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5271 dsc->rd = rd;
5272
5273 if (is_mov)
5274 dsc->modinsn[0] = insn & 0xfff00fff;
5275 else
5276 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5277
5278 dsc->cleanup = &cleanup_alu_imm;
5279
5280 return 0;
5281 }
5282
5283 static int
5284 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5285 uint16_t insn2, struct regcache *regs,
5286 struct displaced_step_closure *dsc)
5287 {
5288 unsigned int op = bits (insn1, 5, 8);
5289 unsigned int rn, rm, rd;
5290 ULONGEST rd_val, rn_val;
5291
5292 rn = bits (insn1, 0, 3); /* Rn */
5293 rm = bits (insn2, 0, 3); /* Rm */
5294 rd = bits (insn2, 8, 11); /* Rd */
5295
5296 /* This routine is only called for instruction MOV. */
5297 gdb_assert (op == 0x2 && rn == 0xf);
5298
5299 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5300 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5301
5302 if (debug_displaced)
5303 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5304 "ALU", insn1, insn2);
5305
5306 /* Instruction is of form:
5307
5308 <op><cond> rd, [rn,] #imm
5309
5310 Rewrite as:
5311
5312 Preparation: tmp1, tmp2 <- r0, r1;
5313 r0, r1 <- rd, rn
5314 Insn: <op><cond> r0, r1, #imm
5315 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5316 */
5317
5318 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5319 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5320 rn_val = displaced_read_reg (regs, dsc, rn);
5321 rd_val = displaced_read_reg (regs, dsc, rd);
5322 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5323 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5324 dsc->rd = rd;
5325
5326 dsc->modinsn[0] = insn1;
5327 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5328 dsc->numinsns = 2;
5329
5330 dsc->cleanup = &cleanup_alu_imm;
5331
5332 return 0;
5333 }
5334
5335 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5336
5337 static void
5338 cleanup_alu_reg (struct gdbarch *gdbarch,
5339 struct regcache *regs, struct displaced_step_closure *dsc)
5340 {
5341 ULONGEST rd_val;
5342 int i;
5343
5344 rd_val = displaced_read_reg (regs, dsc, 0);
5345
5346 for (i = 0; i < 3; i++)
5347 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5348
5349 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5350 }
5351
5352 static void
5353 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5354 struct displaced_step_closure *dsc,
5355 unsigned int rd, unsigned int rn, unsigned int rm)
5356 {
5357 ULONGEST rd_val, rn_val, rm_val;
5358
5359 /* Instruction is of form:
5360
5361 <op><cond> rd, [rn,] rm [, <shift>]
5362
5363 Rewrite as:
5364
5365 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5366 r0, r1, r2 <- rd, rn, rm
5367 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5368 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5369 */
5370
5371 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5372 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5373 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5374 rd_val = displaced_read_reg (regs, dsc, rd);
5375 rn_val = displaced_read_reg (regs, dsc, rn);
5376 rm_val = displaced_read_reg (regs, dsc, rm);
5377 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5378 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5379 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5380 dsc->rd = rd;
5381
5382 dsc->cleanup = &cleanup_alu_reg;
5383 }
5384
5385 static int
5386 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5387 struct displaced_step_closure *dsc)
5388 {
5389 unsigned int op = bits (insn, 21, 24);
5390 int is_mov = (op == 0xd);
5391
5392 if (!insn_references_pc (insn, 0x000ff00ful))
5393 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5394
5395 if (debug_displaced)
5396 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5397 is_mov ? "move" : "ALU", (unsigned long) insn);
5398
5399 if (is_mov)
5400 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5401 else
5402 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5403
5404 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5405 bits (insn, 0, 3));
5406 return 0;
5407 }
5408
5409 static int
5410 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5411 struct regcache *regs,
5412 struct displaced_step_closure *dsc)
5413 {
5414 unsigned rm, rd;
5415
5416 rm = bits (insn, 3, 6);
5417 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5418
5419 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5420 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5421
5422 if (debug_displaced)
5423 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5424 (unsigned short) insn);
5425
5426 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5427
5428 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5429
5430 return 0;
5431 }
5432
5433 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5434
5435 static void
5436 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5437 struct regcache *regs,
5438 struct displaced_step_closure *dsc)
5439 {
5440 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5441 int i;
5442
5443 for (i = 0; i < 4; i++)
5444 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5445
5446 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5447 }
5448
5449 static void
5450 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5451 struct displaced_step_closure *dsc,
5452 unsigned int rd, unsigned int rn, unsigned int rm,
5453 unsigned rs)
5454 {
5455 int i;
5456 ULONGEST rd_val, rn_val, rm_val, rs_val;
5457
5458 /* Instruction is of form:
5459
5460 <op><cond> rd, [rn,] rm, <shift> rs
5461
5462 Rewrite as:
5463
5464 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5465 r0, r1, r2, r3 <- rd, rn, rm, rs
5466 Insn: <op><cond> r0, r1, r2, <shift> r3
5467 Cleanup: tmp5 <- r0
5468 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5469 rd <- tmp5
5470 */
5471
5472 for (i = 0; i < 4; i++)
5473 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5474
5475 rd_val = displaced_read_reg (regs, dsc, rd);
5476 rn_val = displaced_read_reg (regs, dsc, rn);
5477 rm_val = displaced_read_reg (regs, dsc, rm);
5478 rs_val = displaced_read_reg (regs, dsc, rs);
5479 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5480 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5481 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5482 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5483 dsc->rd = rd;
5484 dsc->cleanup = &cleanup_alu_shifted_reg;
5485 }
5486
5487 static int
5488 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5489 struct regcache *regs,
5490 struct displaced_step_closure *dsc)
5491 {
5492 unsigned int op = bits (insn, 21, 24);
5493 int is_mov = (op == 0xd);
5494 unsigned int rd, rn, rm, rs;
5495
5496 if (!insn_references_pc (insn, 0x000fff0ful))
5497 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5498
5499 if (debug_displaced)
5500 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5501 "%.8lx\n", is_mov ? "move" : "ALU",
5502 (unsigned long) insn);
5503
5504 rn = bits (insn, 16, 19);
5505 rm = bits (insn, 0, 3);
5506 rs = bits (insn, 8, 11);
5507 rd = bits (insn, 12, 15);
5508
5509 if (is_mov)
5510 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5511 else
5512 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5513
5514 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5515
5516 return 0;
5517 }
5518
/* Clean up load instructions.  The modified instruction loaded into r0
   (and r1 for doubleword transfers); r2 held the base register.  Restore
   the scratch registers, perform base writeback if requested, and move
   the loaded value(s) into the real destination register(s).  */

static void
cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
	      struct displaced_step_closure *dsc)
{
  ULONGEST rt_val, rt_val2 = 0, rn_val;

  /* Pick up the loaded value(s) and the (possibly updated) base
     address before the scratch registers are restored.  */
  rt_val = displaced_read_reg (regs, dsc, 0);
  if (dsc->u.ldst.xfersize == 8)
    rt_val2 = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, 2);

  /* Restore the scratch registers; r3 was only used for Rm in the
     register-offset forms.  */
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);

  /* Handle register writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
  /* Put result in right place.  Loading into the PC is permitted here
     and performs the branch (LOAD_WRITE_PC).  */
  displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
  if (dsc->u.ldst.xfersize == 8)
    displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
}
5547
5548 /* Clean up store instructions. */
5549
5550 static void
5551 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5552 struct displaced_step_closure *dsc)
5553 {
5554 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5555
5556 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5557 if (dsc->u.ldst.xfersize > 4)
5558 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5559 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5560 if (!dsc->u.ldst.immed)
5561 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5562 if (!dsc->u.ldst.restore_r4)
5563 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5564
5565 /* Writeback. */
5566 if (dsc->u.ldst.writeback)
5567 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5568 }
5569
/* Copy "extra" load/store instructions.  These are halfword/doubleword
   transfers, which have a different encoding to byte/word transfers.
   UNPRIVILEGED selects the debug-message wording only.  Instructions
   that don't reference the PC run unmodified; otherwise Rt/Rn/Rm are
   rewritten onto scratch registers r0-r3 and the appropriate load or
   store cleanup is installed.  Always returns 0.  */

static int
arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
		      struct regcache *regs, struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 24);
  unsigned int op2 = bits (insn, 5, 6);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);
  /* Decode tables indexed by OPCODE (computed below): whether the
     operation is a load, and its transfer size in bytes.  */
  char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
  char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
  int immed = (op1 & 0x4) != 0;
  int opcode;
  ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;

  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
			"insn %.8lx\n", unprivileged ? "unprivileged " : "",
			(unsigned long) insn);

  /* Pack op2 with the L bit and the immediate bit into a dense table
     index; a negative result means the bits don't form a valid extra
     load/store encoding.  */
  opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;

  if (opcode < 0)
    internal_error (__FILE__, __LINE__,
		    _("copy_extra_ld_st: instruction decode error"));

  /* Save the scratch registers (r3 only for register-offset forms).  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  /* Read the operands; doubleword transfers also use Rt+1.  */
  rt_val = displaced_read_reg (regs, dsc, rt);
  if (bytesize[opcode] == 8)
    rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  /* Preload the scratch registers with the operand values.  */
  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  if (bytesize[opcode] == 8)
    displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);

  dsc->rd = rt;
  dsc->u.ldst.xfersize = bytesize[opcode];
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  /* Writeback happens for post-indexed (P clear) or W-set forms.  */
  dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
       ->
       {ldr,str}<width><cond> r0, [r1,] [r2, #imm].  */
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
  else
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
       ->
       {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3].  */
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;

  dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;

  return 0;
}
5644
/* Copy byte/half word/word loads and stores.  */

/* Shared scratch-register setup for the load/store copy routines:
   r0 <- Rt, r2 <- Rn, r3 <- Rm (register-offset forms only); r4 is
   additionally saved for stores, since the PC-store sequence below may
   use it as scratch.  The matching cleanup routine undoes the shuffle.
   Note USERMODE is currently unused in this function; it only affects
   the callers' debug output.  */

static void
install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
		    struct displaced_step_closure *dsc, int load,
		    int immed, int writeback, int size, int usermode,
		    int rt, int rm, int rn)
{
  ULONGEST rt_val, rn_val, rm_val = 0;

  /* Save the scratch registers the modified instruction will touch.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  if (!load)
    dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);

  /* Read the operand values (PC operands are adjusted by
     displaced_read_reg), then preload the scratch registers.  */
  rt_val = displaced_read_reg (regs, dsc, rt);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rt;
  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = writeback;

  /* To write PC we can do:

     Before this sequence of instructions:
     r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
     r2 is the Rn value got from dispalced_read_reg.

     Insn1: push {pc} Write address of STR instruction + offset on stack
     Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
     Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
     = addr(Insn1) + offset - addr(Insn3) - 8
     = offset - 16
     Insn4: add r4, r4, #8 r4 = offset - 8
     Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
     = from + offset
     Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])

     Otherwise we don't know what value to write for PC, since the offset is
     architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
     of this can be found in Section "Saving from r15" in
     http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;
}
5700
5701
/* Copy a Thumb-2 PC-relative (literal) load of SIZE bytes for displaced
   stepping.  The literal load is rewritten as a register-offset load
   "LDR r0, [r2, r3]" with r2 preloaded with the word-aligned PC and r3
   with the signed immediate; cleanup_load moves the result into Rt and
   restores the scratch registers.  Always returns 0.  */

static int
thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  struct displaced_step_closure *dsc, int size)
{
  unsigned int u_bit = bit (insn1, 7);	/* U: add (1) or subtract (0).  */
  unsigned int rt = bits (insn2, 12, 15);
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
			imm12);

  /* Fold the U bit into the immediate so a plain register-offset add
     works below.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction LDR Rt imm12 into:

     Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12

     LDR R0, R2, R3,

     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2].  */


  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  /* Literal loads use the word-aligned PC as base address.  */
  pc_val = pc_val & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);

  dsc->rd = rt;

  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  /* LDR R0, R2, R3 */
  dsc->modinsn[0] = 0xf852;
  dsc->modinsn[1] = 0x3;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_load;

  return 0;
}
5757
5758 static int
5759 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5760 uint16_t insn2, struct regcache *regs,
5761 struct displaced_step_closure *dsc,
5762 int writeback, int immed)
5763 {
5764 unsigned int rt = bits (insn2, 12, 15);
5765 unsigned int rn = bits (insn1, 0, 3);
5766 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5767 /* In LDR (register), there is also a register Rm, which is not allowed to
5768 be PC, so we don't have to check it. */
5769
5770 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5771 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5772 dsc);
5773
5774 if (debug_displaced)
5775 fprintf_unfiltered (gdb_stdlog,
5776 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5777 rt, rn, insn1, insn2);
5778
5779 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5780 0, rt, rm, rn);
5781
5782 dsc->u.ldst.restore_r4 = 0;
5783
5784 if (immed)
5785 /* ldr[b]<cond> rt, [rn, #imm], etc.
5786 ->
5787 ldr[b]<cond> r0, [r2, #imm]. */
5788 {
5789 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5790 dsc->modinsn[1] = insn2 & 0x0fff;
5791 }
5792 else
5793 /* ldr[b]<cond> rt, [rn, rm], etc.
5794 ->
5795 ldr[b]<cond> r0, [r2, r3]. */
5796 {
5797 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5798 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5799 }
5800
5801 dsc->numinsns = 2;
5802
5803 return 0;
5804 }
5805
5806
/* Copy an ARM LDR/LDRB/STR/STRB (including the unprivileged "T"
   variants, selected by USERMODE, which only affects the debug output
   here).  LOAD and SIZE describe the transfer.  PC-referencing
   instructions get their operands rewritten onto scratch registers;
   storing the PC additionally needs a multi-instruction sequence to
   materialize the correct store-time PC value.  Always returns 0.  */

static int
arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
			    struct regcache *regs,
			    struct displaced_step_closure *dsc,
			    int load, int size, int usermode)
{
  int immed = !bit (insn, 25);
  /* Post-indexed (P clear) or W set implies base writeback.  */
  int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */

  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying %s%s r%d [r%d] insn %.8lx\n",
			load ? (size == 1 ? "ldrb" : "ldr")
			     : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
			rt, rn,
			(unsigned long) insn);

  install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
		      usermode, rt, rm, rn);

  if (load || rt != ARM_PC_REGNUM)
    {
      dsc->u.ldst.restore_r4 = 0;

      if (immed)
	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
	dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
      else
	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, r3].  */
	dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
    }
  else
    {
      /* Storing the PC: compute the value the original instruction
	 would have stored (see the comment in install_load_store for
	 the derivation of this sequence).  */
      /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
      dsc->u.ldst.restore_r4 = 1;
      dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
      dsc->modinsn[1] = 0xe8bd0010;  /* pop  {r4} */
      dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
      dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
      dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */

      /* As above.  */
      if (immed)
	dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
      else
	dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;

      dsc->numinsns = 6;
    }

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;

  return 0;
}
5871
/* Cleanup LDM instructions with fully-populated register list.  This is an
   unfortunate corner case: it's impossible to implement correctly by modifying
   the instruction.  The issue is as follows: we have an instruction,

   ldm rN, {r0-r15}

   which we must rewrite to avoid loading PC.  A possible solution would be to
   do the load in two halves, something like (with suitable cleanup
   afterwards):

   mov r8, rN
   ldm[id][ab] r8!, {r0-r7}
   str r7, <temp>
   ldm[id][ab] r8, {r7-r14}
   <bkpt>

   but at present there's no suitable place for <temp>, since the scratch space
   is overwritten before the cleanup routine is called.  For now, we simply
   emulate the instruction.  */

static void
cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  int inc = dsc->u.block.increment;
  /* For "before" addressing modes the address is bumped prior to each
     transfer; for "after" modes, following it.  Direction follows the
     increment/decrement flag.  */
  int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
  int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
  uint32_t regmask = dsc->u.block.regmask;
  /* Incrementing transfers walk the registers upwards from r0,
     decrementing ones downwards from r15.  */
  int regno = inc ? 0 : 15;
  CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
  /* ldm rn, {...pc}^ is an exception return in privileged modes.  */
  int exception_return = dsc->u.block.load && dsc->u.block.user
			 && (regmask & 0x8000) != 0;
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int do_transfer = condition_true (dsc->u.block.cond, status);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (!do_transfer)
    return;

  /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
     sensible we can do here.  Complain loudly.  */
  if (exception_return)
    error (_("Cannot single-step exception return"));

  /* We don't handle any stores here for now.  */
  gdb_assert (dsc->u.block.load != 0);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
			"%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
			dsc->u.block.increment ? "inc" : "dec",
			dsc->u.block.before ? "before" : "after");

  /* Emulate the transfer: for each set bit in REGMASK (in ascending or
     descending register order depending on INC), load one word and
     write it to the corresponding register.  */
  while (regmask)
    {
      uint32_t memword;

      if (inc)
	while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
	  regno++;
      else
	while (regno >= 0 && (regmask & (1 << regno)) == 0)
	  regno--;

      xfer_addr += bump_before;

      memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
      displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);

      xfer_addr += bump_after;

      regmask &= ~(1 << regno);
    }

  /* Emulate base register writeback.  */
  if (dsc->u.block.writeback)
    displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
			 CANNOT_WRITE_PC);
}
5950
/* Clean up an STM which included the PC in the register list.  The
   out-of-line copy stored the scratch-area PC; compute the offset the
   CPU applied when storing r15 and patch the stored word so it holds
   the value the original (non-displaced) instruction would have
   written.  */

static void
cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int store_executed = condition_true (dsc->u.block.cond, status);
  CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
  CORE_ADDR stm_insn_addr;
  uint32_t pc_val;
  long offset;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* If condition code fails, there's nothing else to do.  */
  if (!store_executed)
    return;

  /* Locate the memory slot the PC was stored into: it is the highest
     address of the transfer (PC is the highest-numbered register).  */
  if (dsc->u.block.increment)
    {
      pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;

      if (dsc->u.block.before)
	pc_stored_at += 4;
    }
  else
    {
      pc_stored_at = dsc->u.block.xfer_addr;

      if (dsc->u.block.before)
	pc_stored_at -= 4;
    }

  /* The stored value is the scratch-copy address plus the CPU's
     store-time PC offset; recover that offset by subtracting the
     scratch base.  */
  pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
  stm_insn_addr = dsc->scratch_base;
  offset = pc_val - stm_insn_addr;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
			"STM instruction\n", offset);

  /* Rewrite the stored PC to the proper value for the non-displaced original
     instruction.  */
  write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
				 dsc->insn_addr + offset);
}
5997
/* Clean up an LDM which includes the PC in the register list.  We clumped all
   the registers in the transferred list into a contiguous range r0...rX (to
   avoid loading PC directly and losing control of the debugged program), so we
   must undo that here.  */

static void
cleanup_block_load_pc (struct gdbarch *gdbarch,
		       struct regcache *regs,
		       struct displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int load_executed = condition_true (dsc->u.block.cond, status);
  unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
  unsigned int regs_loaded = bitcount (mask);
  unsigned int num_to_shuffle = regs_loaded, clobbered;

  /* The method employed here will fail if the register list is fully populated
     (we need to avoid loading PC directly).  */
  gdb_assert (num_to_shuffle < 16);

  if (!load_executed)
    return;

  /* The modified instruction loaded into r0..r(N-1); every one of those
     scratch registers is initially considered clobbered, and is removed
     from the set as its value reaches its final destination.  */
  clobbered = (1 << num_to_shuffle) - 1;

  /* Walk the destination registers from r15 downwards; the highest
     remaining scratch register (num_to_shuffle - 1) always holds the
     value for the highest unprocessed destination in MASK.  */
  while (num_to_shuffle > 0)
    {
      if ((mask & (1 << write_reg)) != 0)
	{
	  unsigned int read_reg = num_to_shuffle - 1;

	  if (read_reg != write_reg)
	    {
	      ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
	      displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
	      if (debug_displaced)
		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
				    "loaded register r%d to r%d\n"), read_reg,
				    write_reg);
	    }
	  else if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
				"r%d already in the right place\n"),
				write_reg);

	  /* This destination now holds its final value, so its slot is
	     no longer "clobbered scratch".  */
	  clobbered &= ~(1 << write_reg);

	  num_to_shuffle--;
	}

      write_reg--;
    }

  /* Restore any registers we scribbled over.  */
  for (write_reg = 0; clobbered != 0; write_reg++)
    {
      if ((clobbered & (1 << write_reg)) != 0)
	{
	  displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
			       CANNOT_WRITE_PC);
	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
				"clobbered register r%d\n"), write_reg);
	  clobbered &= ~(1 << write_reg);
	}
    }

  /* Perform register writeback manually.  */
  if (dsc->u.block.writeback)
    {
      ULONGEST new_rn_val = dsc->u.block.xfer_addr;

      if (dsc->u.block.increment)
	new_rn_val += regs_loaded * 4;
      else
	new_rn_val -= regs_loaded * 4;

      displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
			   CANNOT_WRITE_PC);
    }
}
6079
6080 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6081 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6082
static int
arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
		     struct regcache *regs,
		     struct displaced_step_closure *dsc)
{
  /* Decode the ARM LDM/STM fields.  */
  int load = bit (insn, 20);		/* L: load (1) or store (0).  */
  int user = bit (insn, 22);		/* S: user-bank transfer.  */
  int increment = bit (insn, 23);	/* U: increment vs decrement base.  */
  int before = bit (insn, 24);	/* P: pre- vs post-indexed.  */
  int writeback = bit (insn, 21);	/* W: write back base register.  */
  int rn = bits (insn, 16, 19);	/* Base register.  */

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
    return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);

  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.8lx\n", (unsigned long) insn);

  /* Record everything the cleanup routines need to emulate writeback
     and to shuffle loaded registers into place afterwards.  */
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = user;
  dsc->u.block.increment = increment;
  dsc->u.block.before = before;
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = bits (insn, 28, 31);

  dsc->u.block.regmask = insn & 0xffff;

  if (load)
    {
      if ((insn & 0xffff) == 0xffff)
	{
	  /* LDM with a fully-populated register list.  This case is
	     particularly tricky.  Implement for now by fully emulating the
	     instruction (which might not behave perfectly in all cases, but
	     these instructions should be rare enough for that not to matter
	     too much).  */
	  dsc->modinsn[0] = ARM_NOP;

	  dsc->cleanup = &cleanup_block_load_all;
	}
      else
	{
	  /* LDM of a list of registers which includes PC.  Implement by
	     rewriting the list of registers to be transferred into a
	     contiguous chunk r0...rX before doing the transfer, then shuffling
	     registers into the correct places in the cleanup routine.  */
	  unsigned int regmask = insn & 0xffff;
	  unsigned int num_in_list = bitcount (regmask), new_regmask;
	  unsigned int i;

	  /* Save r0..r(N-1) so the cleanup routine can restore whatever
	     the rewritten transfer clobbers.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Writeback makes things complicated.  We need to avoid clobbering
	     the base register with one of the registers in our modified
	     register list, but just using a different register can't work in
	     all cases, e.g.:

	     ldm r14!, {r0-r13,pc}

	     which would need to be rewritten as:

	     ldm rN!, {r0-r14}

	     but that can't work, because there's no free register for N.

	     Solve this by turning off the writeback bit, and emulating
	     writeback manually in the cleanup routine.  */

	  if (writeback)
	    insn &= ~(1 << 21);

	  /* Replace the original list with a contiguous r0..r(N-1) list
	     of the same length.  */
	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) insn & 0xffff, new_regmask);

	  dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM of a list of registers which includes PC.  Run the instruction
	 as-is, but out of line: this will store the wrong value for the PC,
	 so we must manually fix up the memory in the cleanup routine.
	 Doing things this way has the advantage that we can auto-detect
	 the offset of the PC write (which is architecture-dependent) in
	 the cleanup routine.  */
      dsc->modinsn[0] = insn;

      dsc->cleanup = &cleanup_block_store_pc;
    }

  return 0;
}
6196
/* Copy/rewrite a Thumb-2 32-bit LDM/STM for displaced stepping.  Mirrors
   arm_copy_block_xfer, but for the T2 encoding (INSN1 is the first
   halfword, INSN2 the second, which holds the register list).  */

static int
thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
			struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  int rn = bits (insn1, 0, 3);		/* Base register.  */
  int load = bit (insn1, 4);		/* L: load (1) or store (0).  */
  int writeback = bit (insn1, 5);	/* W: write back base register.  */

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);

  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.4x%.4x\n", insn1, insn2);

  /* Clear bit 13, since it should be always zero.  */
  dsc->u.block.regmask = (insn2 & 0xdfff);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = 0;	/* Thumb-2 has no user-bank transfers.  */
  dsc->u.block.increment = bit (insn1, 7);
  dsc->u.block.before = bit (insn1, 8);
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = INST_AL;	/* Thumb-2 LDM/STM is unconditional here.  */
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);

  if (load)
    {
      if (dsc->u.block.regmask == 0xffff)
	{
	  /* This branch is impossible to happen.  */
	  gdb_assert (0);
	}
      else
	{
	  /* LDM including PC: rewrite the register list to a contiguous
	     r0..r(N-1) run, then shuffle values into place in the
	     cleanup routine (see arm_copy_block_xfer).  */
	  unsigned int regmask = dsc->u.block.regmask;
	  unsigned int num_in_list = bitcount (regmask), new_regmask;
	  unsigned int i;

	  /* Save r0..r(N-1) so the cleanup routine can restore whatever
	     the rewritten transfer clobbers.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Suppress writeback; it is emulated manually in the cleanup
	     routine.  */
	  if (writeback)
	    insn1 &= ~(1 << 5);

	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) dsc->u.block.regmask, new_regmask);

	  dsc->modinsn[0] = insn1;
	  dsc->modinsn[1] = (new_regmask & 0xffff);
	  dsc->numinsns = 2;

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM including PC: run as-is out of line; the stored (wrong) PC
	 value is patched up in memory by the cleanup routine.  */
      dsc->modinsn[0] = insn1;
      dsc->modinsn[1] = insn2;
      dsc->numinsns = 2;
      dsc->cleanup = &cleanup_block_store_pc;
    }
  return 0;
}
6278
6279 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6280 This is used to avoid a dependency on BFD's bfd_endian enum. */
6281
6282 ULONGEST
6283 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6284 int byte_order)
6285 {
6286 return read_memory_unsigned_integer (memaddr, len,
6287 (enum bfd_endian) byte_order);
6288 }
6289
6290 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6291
6292 CORE_ADDR
6293 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6294 CORE_ADDR val)
6295 {
6296 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6297 }
6298
6299 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6300
static CORE_ADDR
arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
{
  /* Generic ARM has no knowledge of syscall semantics; return 0 to
     indicate no special next-PC.  OS-specific code (e.g. Linux)
     overrides this behavior.  */
  return 0;
}
6306
6307 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6308
6309 int
6310 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6311 {
6312 return arm_is_thumb (self->regcache);
6313 }
6314
6315 /* single_step() is called just before we want to resume the inferior,
6316 if we want to single-step it but there is no hardware or kernel
6317 single-step support. We find the target of the coming instructions
6318 and breakpoint them. */
6319
int
arm_software_single_step (struct frame_info *frame)
{
  struct regcache *regcache = get_current_regcache ();
  struct gdbarch *gdbarch = get_regcache_arch (regcache);
  struct address_space *aspace = get_regcache_aspace (regcache);
  struct arm_get_next_pcs next_pcs_ctx;
  CORE_ADDR pc;
  int i;
  VEC (CORE_ADDR) *next_pcs = NULL;
  /* Ensure the next-PC vector is freed on every exit path.  */
  struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);

  /* Set up the next-PC analysis context; the 0 argument selects
     non-thumb2-breakpoint behavior (see arm_get_next_pcs_ctor).  */
  arm_get_next_pcs_ctor (&next_pcs_ctx,
			 &arm_get_next_pcs_ops,
			 gdbarch_byte_order (gdbarch),
			 gdbarch_byte_order_for_code (gdbarch),
			 0,
			 regcache);

  /* Compute every possible successor address of the current
     instruction (conditional branches yield more than one).  */
  next_pcs = arm_get_next_pcs (&next_pcs_ctx);

  /* Plant a single-step breakpoint at each candidate next PC.  */
  for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
    arm_insert_single_step_breakpoint (gdbarch, aspace, pc);

  do_cleanups (old_chain);

  return 1;
}
6348
6349 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6350 for Linux, where some SVC instructions must be treated specially. */
6351
6352 static void
6353 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6354 struct displaced_step_closure *dsc)
6355 {
6356 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6357
6358 if (debug_displaced)
6359 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6360 "%.8lx\n", (unsigned long) resume_addr);
6361
6362 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6363 }
6364
6365
6366 /* Common copy routine for svc instruciton. */
6367
6368 static int
6369 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6370 struct displaced_step_closure *dsc)
6371 {
6372 /* Preparation: none.
6373 Insn: unmodified svc.
6374 Cleanup: pc <- insn_addr + insn_size. */
6375
6376 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6377 instruction. */
6378 dsc->wrote_to_pc = 1;
6379
6380 /* Allow OS-specific code to override SVC handling. */
6381 if (dsc->u.svc.copy_svc_os)
6382 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6383 else
6384 {
6385 dsc->cleanup = &cleanup_svc;
6386 return 0;
6387 }
6388 }
6389
6390 static int
6391 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6392 struct regcache *regs, struct displaced_step_closure *dsc)
6393 {
6394
6395 if (debug_displaced)
6396 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6397 (unsigned long) insn);
6398
6399 dsc->modinsn[0] = insn;
6400
6401 return install_svc (gdbarch, regs, dsc);
6402 }
6403
6404 static int
6405 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6406 struct regcache *regs, struct displaced_step_closure *dsc)
6407 {
6408
6409 if (debug_displaced)
6410 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6411 insn);
6412
6413 dsc->modinsn[0] = insn;
6414
6415 return install_svc (gdbarch, regs, dsc);
6416 }
6417
6418 /* Copy undefined instructions. */
6419
6420 static int
6421 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6422 struct displaced_step_closure *dsc)
6423 {
6424 if (debug_displaced)
6425 fprintf_unfiltered (gdb_stdlog,
6426 "displaced: copying undefined insn %.8lx\n",
6427 (unsigned long) insn);
6428
6429 dsc->modinsn[0] = insn;
6430
6431 return 0;
6432 }
6433
6434 static int
6435 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6436 struct displaced_step_closure *dsc)
6437 {
6438
6439 if (debug_displaced)
6440 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6441 "%.4x %.4x\n", (unsigned short) insn1,
6442 (unsigned short) insn2);
6443
6444 dsc->modinsn[0] = insn1;
6445 dsc->modinsn[1] = insn2;
6446 dsc->numinsns = 2;
6447
6448 return 0;
6449 }
6450
6451 /* Copy unpredictable instructions. */
6452
6453 static int
6454 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6455 struct displaced_step_closure *dsc)
6456 {
6457 if (debug_displaced)
6458 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6459 "%.8lx\n", (unsigned long) insn);
6460
6461 dsc->modinsn[0] = insn;
6462
6463 return 0;
6464 }
6465
6466 /* The decode_* functions are instruction decoding helpers. They mostly follow
6467 the presentation in the ARM ARM. */
6468
/* Decode the miscellaneous / memory-hint / Advanced-SIMD region of the
   unconditional ARM encoding space and dispatch to the appropriate
   displaced-stepping copy routine.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  /* op1 = bits 20..26, op2 = bits 4..7, rn = bits 16..19 of the
     encoding, matching the decode-table presentation in the ARM ARM.  */
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    switch (op2)
      {
      /* Barriers and CLREX have no visible register effects; run
	 unmodified.  */
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
6528
/* Decode ARM instructions in the unconditional (cond == 0b1111) space
   and dispatch to the appropriate displaced-stepping copy routine.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      /* BLX (immediate) lives in the unconditional space.  */
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	/* Whether the base register field is the PC.  */
	int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
6611
6612 /* Decode miscellaneous instructions in dp/misc encoding space. */
6613
static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  /* op2 = bits 4..6, op = bits 21..22 of the encoding, matching the
     "miscellaneous instructions" decode table in the ARM ARM.  */
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* Other OP values fall through to the undefined case.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
6663
/* Decode the ARM data-processing / miscellaneous instruction space and
   dispatch to the appropriate displaced-stepping copy routine.  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    struct displaced_step_closure *dsc)
{
  /* Bit 25 set: data-processing with immediate operand.  */
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      /* Register-operand forms: sub-decode on op1 (bits 20..24) and
	 op2 (bits 4..7).  */
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6709
/* Decode ARM load/store word and unsigned byte instructions, selecting
   load vs store, transfer size, and user-mode (T-suffixed) variants.
   NOTE(review): the trailing int arguments to arm_copy_ldr_str_ldrb_strb
   appear to be (load, size, usermode) — confirm against its definition.  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  /* A = bit 25 (register offset form), B = bit 4; op1 = bits 20..24,
     as in the ARM ARM load/store decode table.  */
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
6746
/* Decode ARM media instructions (parallel add/sub, pack/unpack,
   saturate, bit-field ops).  None of these can write the PC, so all
   are copied unmodified or treated as undefined.  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  struct displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  /* Rd == 0xf selects USAD8 (no accumulate) vs USADA8.  */
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  /* Rn == 0xf selects BFC (clear) vs BFI (insert).  */
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6802
static int
arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
			struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  /* Bit 25 distinguishes branch (B/BL/BLX) encodings from block
     transfer (LDM/STM) encodings in this region of the opcode map.  */
  return (bit (insn, 25)
	  ? arm_copy_b_bl_blx (gdbarch, insn, regs, dsc)
	  : arm_copy_block_xfer (gdbarch, insn, regs, dsc));
}
6813
/* Decode ARM extension-register (VFP/Neon) load/store instructions and
   dispatch to the appropriate displaced-stepping copy routine.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6844
6845 /* Decode shifted register instructions. */
6846
static int
thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
			    uint16_t insn2, struct regcache *regs,
			    struct displaced_step_closure *dsc)
{
  /* PC is only allowed to be used in instruction MOV.  */
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int op = bits (insn1, 5, 8);

  /* Anything other than MOV (op == 0x2 with Rn == 0xf) cannot refer to
     the PC, so it can be copied unmodified.  */
  if (op != 0x2 || rn != 0xf)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					"dp (shift reg)", dsc);

  return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
}
6863
6864
6865 /* Decode extension register load/store. Exactly the same as
6866 arm_decode_ext_reg_ld_st. */
6867
static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  /* Opcode field lives in bits 4..8 of the first halfword in the
     Thumb-2 encoding.  */
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c:  /* 01x00 */
    case 0x0a: case 0x0e:  /* 01x10 */
    case 0x12: case 0x16:  /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d:  /* 01x01 */
    case 0x0b: case 0x0f:  /* 01x11 */
    case 0x13: case 0x17:  /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Only VLDR can be PC-relative, so it needs the full copro
	 load/store treatment.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6903
/* Decode the ARM supervisor-call / coprocessor instruction space and
   dispatch to the appropriate displaced-stepping copy routine.  */

static int
arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
		      struct regcache *regs, struct displaced_step_closure *dsc)
{
  /* op1 = bits 20..25, op = bit 4, coproc = bits 8..11; coprocessors
     10/11 (coproc & 0xe == 0xa) are VFP/Neon.  */
  unsigned int op1 = bits (insn, 20, 25);
  int op = bit (insn, 4);
  unsigned int coproc = bits (insn, 8, 11);

  if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
    return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* stc/stc2.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* ldc/ldc2 imm/lit.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x3e) == 0x00)
    return arm_copy_undef (gdbarch, insn, dsc);
  else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
    return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
  else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
  else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
  else if ((op1 & 0x30) == 0x20 && !op)
    {
      if ((coproc & 0xe) == 0xa)
	return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
    }
  else if ((op1 & 0x30) == 0x20 && op)
    return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
  else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
  else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
  else if ((op1 & 0x30) == 0x30)
    return arm_copy_svc (gdbarch, insn, regs, dsc);
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Possibly unreachable.  */
}
6948
/* Decode the Thumb-2 supervisor-call / coprocessor instruction space
   and dispatch to the appropriate displaced-stepping copy routine.  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 struct displaced_step_closure *dsc)
{
  /* Decode fields: coproc number from the second halfword; op1
     sub-fields from the first.  */
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0)  /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	  /*coproc is 101x.  SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0)  /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else /* LDC/LDC2 {literal, immediate}: may be PC-relative,
		      so needs the full copro load/store treatment.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);

  return 0;
}
6989
6990 static void
6991 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6992 struct displaced_step_closure *dsc, int rd)
6993 {
6994 /* ADR Rd, #imm
6995
6996 Rewrite as:
6997
6998 Preparation: Rd <- PC
6999 Insn: ADD Rd, #imm
7000 Cleanup: Null.
7001 */
7002
7003 /* Rd <- PC */
7004 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7005 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7006 }
7007
7008 static int
7009 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7010 struct displaced_step_closure *dsc,
7011 int rd, unsigned int imm)
7012 {
7013
7014 /* Encoding T2: ADDS Rd, #imm */
7015 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7016
7017 install_pc_relative (gdbarch, regs, dsc, rd);
7018
7019 return 0;
7020 }
7021
7022 static int
7023 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7024 struct regcache *regs,
7025 struct displaced_step_closure *dsc)
7026 {
7027 unsigned int rd = bits (insn, 8, 10);
7028 unsigned int imm8 = bits (insn, 0, 7);
7029
7030 if (debug_displaced)
7031 fprintf_unfiltered (gdb_stdlog,
7032 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
7033 rd, imm8, insn);
7034
7035 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7036 }
7037
/* Copy a 32-bit Thumb ADR (ADD/SUB PC-relative) for displaced
   stepping: rewrite as ADD/SUB Rd, Rd, #imm with Rd pre-loaded with
   the PC by install_pc_relative.  */

static int
thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int rd = bits (insn2, 8, 11);
  /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
     extract raw immediate encoding rather than computing immediate.  When
     generating ADD or SUB instruction, we can simply perform OR operation to
     set immediate into ADD.  */
  unsigned int imm_3_8 = insn2 & 0x70ff;
  unsigned int imm_i = insn1 & 0x0400;  /* Clear all bits except bit 10.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
			rd, imm_i, imm_3_8, insn1, insn2);

  if (bit (insn1, 7))  /* ADR encoding T2 (the SUB form).  */
    {
      /* Emit SUB Rd, Rd, #imm (SUB immediate, encoding T3).  */
      dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  else  /* ADR encoding T3 (the ADD form).  */
    {
      /* Emit ADD Rd, Rd, #imm (ADD immediate, encoding T3).  */
      dsc->modinsn[0] = (0xf100 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  dsc->numinsns = 2;

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
7074
/* Copy a 16-bit Thumb PC-relative LDR (literal) for displaced
   stepping, by materializing the PC and offset in scratch registers
   and issuing a register-offset load instead.  */

static int
thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
			      struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int rt = bits (insn1, 8, 10);
  unsigned int pc;
  /* The encoded immediate is in words; scale to bytes.  */
  int imm8 = (bits (insn1, 0, 7) << 2);

  /* LDR Rd, #imm8

     Rewrite as:

     Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;

     Insn: LDR R0, [R2, R3];
     Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb ldr r%d [pc #%d]\n"
			, rt, imm8);

  /* Save the registers the rewritten sequence clobbers, so cleanup_load
     can restore them.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
  /* The assembler calculates the required value of the offset from the
     Align(PC,4) value of this instruction to the label.  */
  pc = pc & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);

  dsc->rd = rt;
  dsc->u.ldst.xfersize = 4;
  dsc->u.ldst.rn = 0;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  dsc->modinsn[0] = 0x58d0;	/* ldr r0, [r2, r3]*/

  dsc->cleanup = &cleanup_load;

  return 0;
}
7122
/* Copy a Thumb CBNZ/CBZ (compare and branch on (non-)zero) instruction
   for displaced stepping.  The branch is resolved here, since Rn's value
   is already known; the scratch area just executes a NOP.  */

static int
thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
		     struct regcache *regs,
		     struct displaced_step_closure *dsc)
{
  int non_zero = bit (insn1, 11);	/* 1 for CBNZ, 0 for CBZ.  */
  /* Branch offset encoded as i:imm5:'0'.  */
  unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
  CORE_ADDR from = dsc->insn_addr;
  int rn = bits (insn1, 0, 2);
  int rn_val = displaced_read_reg (regs, dsc, rn);

  dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
  /* CBNZ and CBZ do not affect the condition flags.  If condition is true,
     set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
     condition is false, let it be, cleanup_branch will do nothing.  */
  if (dsc->u.branch.cond)
    {
      dsc->u.branch.cond = INST_AL;
      dsc->u.branch.dest = from + 4 + imm5;	/* Taken: PC + 4 + offset.  */
    }
  else
    dsc->u.branch.dest = from + 2;	/* Not taken: next instruction.  */

  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
			" insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
			rn, rn_val, insn1, dsc->u.branch.dest);

  /* Execute a NOP out of line; cleanup_branch installs the new PC.  */
  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;
  return 0;
}
7161
7162 /* Copy Table Branch Byte/Halfword */
7163 static int
7164 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7165 uint16_t insn2, struct regcache *regs,
7166 struct displaced_step_closure *dsc)
7167 {
7168 ULONGEST rn_val, rm_val;
7169 int is_tbh = bit (insn2, 4);
7170 CORE_ADDR halfwords = 0;
7171 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7172
7173 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7174 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7175
7176 if (is_tbh)
7177 {
7178 gdb_byte buf[2];
7179
7180 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7181 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7182 }
7183 else
7184 {
7185 gdb_byte buf[1];
7186
7187 target_read_memory (rn_val + rm_val, buf, 1);
7188 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7189 }
7190
7191 if (debug_displaced)
7192 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7193 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7194 (unsigned int) rn_val, (unsigned int) rm_val,
7195 (unsigned int) halfwords);
7196
7197 dsc->u.branch.cond = INST_AL;
7198 dsc->u.branch.link = 0;
7199 dsc->u.branch.exchange = 0;
7200 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7201
7202 dsc->cleanup = &cleanup_branch;
7203
7204 return 0;
7205 }
7206
7207 static void
7208 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7209 struct displaced_step_closure *dsc)
7210 {
7211 /* PC <- r7 */
7212 int val = displaced_read_reg (regs, dsc, 7);
7213 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7214
7215 /* r7 <- r8 */
7216 val = displaced_read_reg (regs, dsc, 8);
7217 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7218
7219 /* r8 <- tmp[0] */
7220 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7221
7222 }
7223
/* Copy a 16-bit Thumb POP instruction whose register list includes the
   PC, for displaced stepping.  */

static int
thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
			 struct regcache *regs,
			 struct displaced_step_closure *dsc)
{
  dsc->u.block.regmask = insn1 & 0x00ff;	/* Low-register list (r0-r7).  */

  /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
     to :

     (1) register list is full, that is, r0-r7 are used.
     Prepare: tmp[0] <- r8

     POP {r0, r1, ...., r6, r7}; remove PC from reglist
     MOV r8, r7; Move value of r7 to r8;
     POP {r7}; Store PC value into r7.

     Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]

     (2) register list is not full, supposing there are N registers in
     register list (except PC, 0 <= N <= 7).
     Prepare: for each i, 0 - N, tmp[i] <- ri.

     POP {r0, r1, ...., rN};

     Cleanup: Set registers in original reglist from r0 - rN.  Restore r0 - rN
     from tmp[] properly.
  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
			dsc->u.block.regmask, insn1);

  if (dsc->u.block.regmask == 0xff)	/* Case (1): full register list.  */
    {
      dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);

      dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
      dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
      dsc->modinsn[2] = 0xbc80; /* POP {r7} */

      dsc->numinsns = 3;
      dsc->cleanup = &cleanup_pop_pc_16bit_all;
    }
  else					/* Case (2): partial register list.  */
    {
      unsigned int num_in_list = bitcount (dsc->u.block.regmask);
      unsigned int i;
      unsigned int new_regmask;

      /* Save r0..rN; one extra slot beyond the listed registers receives
	 the popped PC value.  */
      for (i = 0; i < num_in_list + 1; i++)
	dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

      /* Contiguous mask r0..rN for the rewritten POP.  */
      new_regmask = (1 << (num_in_list + 1)) - 1;

      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
					  "{..., pc}: original reg list %.4x,"
					  " modified list %.4x\n"),
			    (int) dsc->u.block.regmask, new_regmask);

      /* Record that the PC was in the original list so the cleanup knows
	 to restore it.  */
      dsc->u.block.regmask |= 0x8000;
      dsc->u.block.writeback = 0;
      dsc->u.block.cond = INST_AL;

      /* Pop into contiguous low registers instead of the original list.  */
      dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);

      dsc->cleanup = &cleanup_block_load_pc;
    }

  return 0;
}
7296
/* Decode a 16-bit Thumb instruction INSN1 and copy it, possibly in modified
   form, into the displaced-stepping scratch space, installing any cleanup
   routine it needs.  Instructions that never read the PC are copied
   unmodified.  An undecodable pattern is an internal error.  */

static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    struct regcache *regs,
				    struct displaced_step_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions.  */
  switch (op_bit_12_15)
    {
      /* Shift (immediate), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					 "shift/add/sub/mov/cmp",
					 dsc);
      break;
    case 4:
      switch (op_bit_10_11)
	{
	case 0: /* Data-processing */
	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					     "data-processing",
					     dsc);
	  break;
	case 1: /* Special data instructions and branch and exchange.  */
	  {
	    unsigned short op = bits (insn1, 7, 9);
	    if (op == 6 || op == 7) /* BX or BLX */
	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
						 dsc);
	  }
	  break;
	default: /* LDR (literal): PC-relative, needs rewriting.  */
	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
	}
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address (ADR).  */
	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
	switch (bits (insn1, 8, 11))
	  {
	  case 1: case 3:  case 9: case 11: /* CBNZ, CBZ */
	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
	    break;
	  case 12: case 13: /* POP */
	    if (bit (insn1, 8)) /* PC is in register list.  */
	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
	    break;
	  case 15: /* If-Then, and hints */
	    if (bits (insn1, 0, 3))
	      /* If-Then makes up to four following instructions conditional.
		 IT instruction itself is not conditional, so handle it as a
		 common unmodified instruction.  */
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
						 dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
	    break;
	  default:
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
	  }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
	err = thumb_copy_b (gdbarch, insn1, dsc);
      else
	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
7399
/* Decode a 32-bit Thumb-2 load / memory-hint instruction (PLD/PLI, LDRB,
   LDRH, LDR and friends) and copy it for displaced stepping, delegating
   to the appropriate copy helper.  Returns that helper's result.  */

static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
				 uint16_t insn1, uint16_t insn2,
				 struct regcache *regs,
				 struct displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);
  int rn = bits (insn1, 0, 3);
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
	{
	  if (rn == 0xf)
	    /* PLD literal or Encoding T3 of PLI(immediate, literal):
	       PC-relative, needs rewriting.  */
	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"pli/pld", dsc);
	}
      else
	{
	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     1);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrb{reg, immediate}/ldrbt",
						dsc);
	}

      break;
    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "pld/unalloc memhint", dsc);
      else
	{
	  if (rn == 0xf)
	    /* LDRH/LDRSH (literal): PC-relative.  */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     2);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrh/ldrht", dsc);
	}
      break;
    case 2: /* Load word */
      {
	int insn2_bit_8_11 = bits (insn2, 8, 11);

	if (rn == 0xf)
	  /* LDR (literal): PC-relative.  */
	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
	else if (op1 == 0x1) /* Encoding T3 */
	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
					   0, 1);
	else /* op1 == 0x0 */
	  {
	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
	      /* LDR (immediate) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, bit (insn2, 8), 1);
	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						  "ldrt", dsc);
	    else
	      /* LDR (register) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, 0, 0);
	  }
	break;
      }
    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      break;
    }
  return 0;
}
7479
/* Decode a 32-bit Thumb-2 instruction (halfwords INSN1:INSN2) and copy it,
   possibly modified, into the displaced-stepping scratch space, installing
   any required cleanup.  Unhandled patterns are an internal error.  */

static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    uint16_t insn2, struct regcache *regs,
				    struct displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      {
	switch (bits (insn1, 9, 10))
	  {
	  case 0:
	    if (bit (insn1, 6))
	      {
		/* Load/store {dual, exclusive}, table branch.  */
		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
		    && bits (insn2, 5, 7) == 0)
		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
						  dsc);
		else
		  /* PC is not allowed to use in load/store {dual, exclusive}
		     instructions.  */
		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						     "load/store dual/ex", dsc);
	      }
	    else /* load/store multiple */
	      {
		switch (bits (insn1, 7, 8))
		  {
		  case 0: case 3: /* SRS, RFE */
		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						       "srs/rfe", dsc);
		    break;
		  case 1: case 2: /* LDM/STM/PUSH/POP */
		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
		    break;
		  }
	      }
	    break;

	  case 1:
	    /* Data-processing (shift register).  */
	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
					      dsc);
	    break;
	  default: /* Coprocessor instructions.  */
	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	    break;
	  }
	break;
      }
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
	{
	  if (bit (insn2, 14)  /* BLX/BL */
	      || bit (insn2, 12) /* Unconditional branch */
	      || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
	  else
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "misc ctrl", dsc);
	}
      else
	{
	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
	    {
	      /* NOTE(review): this OP shadows the outer OP (bit 15 of
		 INSN2); it is only used within this scope.  */
	      int op = bits (insn1, 4, 8);
	      int rn = bits (insn1, 0, 3);
	      if ((op == 0 || op == 0xa) && rn == 0xf)
		/* ADR (ADD/SUB relative to PC) reads the PC, so it needs
		   the PC-relative copy helper.  */
		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
						    regs, dsc);
	      else
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "dp/pb", dsc);
	    }
	  else /* Data processing (modified immediate) */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "dp/mi", dsc);
	}
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
	{
	case 0:
	  if (bit (insn1, 4))
	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
						   regs, dsc);
	  else /* NEON Load/Store and Store single data item */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "neon elt/struct load/store",
					       dsc);
	  break;
	case 1: /* op1 = 3, bits (9, 10) == 1 */
	  switch (bits (insn1, 7, 8))
	    {
	    case 0: case 1: /* Data processing (register) */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "dp(reg)", dsc);
	      break;
	    case 2: /* Multiply and absolute difference */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "mul/mua/diff", dsc);
	      break;
	    case 3: /* Long multiply and divide */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "lmul/lmua", dsc);
	      break;
	    }
	  break;
	default: /* Coprocessor instructions */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	  break;
	}
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));

}
7607
7608 static void
7609 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7610 struct regcache *regs,
7611 struct displaced_step_closure *dsc)
7612 {
7613 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7614 uint16_t insn1
7615 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7616
7617 if (debug_displaced)
7618 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7619 "at %.8lx\n", insn1, (unsigned long) from);
7620
7621 dsc->is_thumb = 1;
7622 dsc->insn_size = thumb_insn_size (insn1);
7623 if (thumb_insn_size (insn1) == 4)
7624 {
7625 uint16_t insn2
7626 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7627 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7628 }
7629 else
7630 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7631 }
7632
/* Copy the instruction at FROM into the displaced-stepping scratch buffer
   at TO, modifying it as necessary and recording per-instruction state
   (including a cleanup routine) in DSC.  Dispatches to the Thumb or ARM
   decoder depending on the current execution mode.  */

void
arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, struct regcache *regs,
			    struct displaced_step_closure *dsc)
{
  int err = 0;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  uint32_t insn;

  /* Most displaced instructions use a 1-instruction scratch space, so set this
     here and override below if/when necessary.  */
  dsc->numinsns = 1;
  dsc->insn_addr = from;
  dsc->scratch_base = to;
  dsc->cleanup = NULL;
  dsc->wrote_to_pc = 0;

  if (!displaced_in_arm_mode (regs))
    return thumb_process_displaced_insn (gdbarch, from, regs, dsc);

  dsc->is_thumb = 0;
  dsc->insn_size = 4;
  insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
			"at %.8lx\n", (unsigned long) insn,
			(unsigned long) from);

  /* Condition field 0xf selects the unconditional instruction space.  */
  if ((insn & 0xf0000000) == 0xf0000000)
    err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
  /* Otherwise dispatch on bits 27:25 of the instruction with bit 4 folded
     into the low bit of the selector.  */
  else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
    {
    case 0x0: case 0x1: case 0x2: case 0x3:
      err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
      break;

    case 0x4: case 0x5: case 0x6:
      err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
      break;

    case 0x7:
      err = arm_decode_media (gdbarch, insn, dsc);
      break;

    case 0x8: case 0x9: case 0xa: case 0xb:
      err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
      break;

    case 0xc: case 0xd: case 0xe: case 0xf:
      err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
      break;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("arm_process_displaced_insn: Instruction decode error"));
}
7690
/* Actually set up the scratch space for a displaced instruction.  Writes
   the (possibly modified) instruction(s) prepared in DSC to the scratch
   area at TO, followed by a breakpoint so the inferior traps back to GDB
   after the single step.  */

void
arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, struct displaced_step_closure *dsc)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  unsigned int i, len, offset;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  /* Scratch instructions are written in 2-byte units for Thumb and 4-byte
     units for ARM.  */
  int size = dsc->is_thumb? 2 : 4;
  const gdb_byte *bkp_insn;

  offset = 0;
  /* Poke modified instruction(s).  */
  for (i = 0; i < dsc->numinsns; i++)
    {
      if (debug_displaced)
	{
	  fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
	  if (size == 4)
	    fprintf_unfiltered (gdb_stdlog, "%.8lx",
				dsc->modinsn[i]);
	  else if (size == 2)
	    fprintf_unfiltered (gdb_stdlog, "%.4x",
				(unsigned short)dsc->modinsn[i]);

	  fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
			      (unsigned long) to + offset);

	}
      write_memory_unsigned_integer (to + offset, size,
				     byte_order_for_code,
				     dsc->modinsn[i]);
      offset += size;
    }

  /* Choose the correct breakpoint instruction.  */
  if (dsc->is_thumb)
    {
      bkp_insn = tdep->thumb_breakpoint;
      len = tdep->thumb_breakpoint_size;
    }
  else
    {
      bkp_insn = tdep->arm_breakpoint;
      len = tdep->arm_breakpoint_size;
    }

  /* Put breakpoint afterwards.  */
  write_memory (to + offset, bkp_insn, len);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
			paddress (gdbarch, from), paddress (gdbarch, to));
}
7746
7747 /* Entry point for cleaning things up after a displaced instruction has been
7748 single-stepped. */
7749
7750 void
7751 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7752 struct displaced_step_closure *dsc,
7753 CORE_ADDR from, CORE_ADDR to,
7754 struct regcache *regs)
7755 {
7756 if (dsc->cleanup)
7757 dsc->cleanup (gdbarch, regs, dsc);
7758
7759 if (!dsc->wrote_to_pc)
7760 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7761 dsc->insn_addr + dsc->insn_size);
7762
7763 }
7764
7765 #include "bfd-in2.h"
7766 #include "libcoff.h"
7767
/* Disassembler callback: print the instruction at MEMADDR.  When MEMADDR
   is a Thumb address, hand the opcodes disassembler a fake Thumb COFF
   symbol so that it decodes Thumb rather than ARM.  */

static int
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
{
  struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;

  if (arm_pc_is_thumb (gdbarch, memaddr))
    {
      /* Built once on first use, then reused by every later call.  */
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;

      if (csym.native == NULL)
	{
	  /* Create a fake symbol vector containing a Thumb symbol.
	     This is solely so that the code in print_insn_little_arm()
	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
	     the presence of a Thumb symbol and switch to decoding
	     Thumb instructions.  */

	  fake_target.flavour = bfd_target_coff_flavour;
	  fake_bfd.xvec = &fake_target;
	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
	  csym.native = &ce;
	  csym.symbol.the_bfd = &fake_bfd;
	  csym.symbol.name = "fake";
	  asym = (asymbol *) & csym;
	}

      /* Strip the Thumb bit to get the real memory address.  */
      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    }
  else
    info->symbols = NULL;

  if (info->endian == BFD_ENDIAN_BIG)
    return print_insn_big_arm (memaddr, info);
  else
    return print_insn_little_arm (memaddr, info);
}
7809
7810 /* The following define instruction sequences that will cause ARM
7811 cpu's to take an undefined instruction trap. These are used to
7812 signal a breakpoint to GDB.
7813
7814 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7815 modes. A different instruction is required for each mode. The ARM
7816 cpu's can also be big or little endian. Thus four different
7817 instructions are needed to support all cases.
7818
7819 Note: ARMv4 defines several new instructions that will take the
7820 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7821 not in fact add the new instructions. The new undefined
7822 instructions in ARMv4 are all instructions that had no defined
7823 behaviour in earlier chips. There is no guarantee that they will
7824 raise an exception, but may be treated as NOP's. In practice, it
   may only be safe to rely on instructions matching:
7826
7827 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7828 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7829 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7830
   Even this may only be true if the condition predicate is true. The
7832 following use a condition predicate of ALWAYS so it is always TRUE.
7833
7834 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7835 and NetBSD all use a software interrupt rather than an undefined
   instruction to force a trap. This can be handled by the
7837 abi-specific code during establishment of the gdbarch vector. */
7838
/* Breakpoint byte sequences, written here in memory order for each byte
   order.  The ARM sequence is an always-condition undefined instruction
   per the table above; the Thumb sequence is the same two bytes either
   way round.  */
#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
#define THUMB_BE_BREAKPOINT {0xbe,0xbe}

/* Default breakpoint sequences; presumably installed into the tdep
   fields consulted by arm_breakpoint_from_pc unless an OS/ABI overrides
   them — confirm against the gdbarch init code.  */
static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7848
7849 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
7850 the program counter value to determine whether a 16-bit or 32-bit
7851 breakpoint should be used. It returns a pointer to a string of
7852 bytes that encode a breakpoint instruction, stores the length of
7853 the string to *lenptr, and adjusts the program counter (if
7854 necessary) to point to the actual memory location where the
7855 breakpoint should be inserted. */
7856
7857 static const unsigned char *
7858 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
7859 {
7860 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7861 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7862
7863 if (arm_pc_is_thumb (gdbarch, *pcptr))
7864 {
7865 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7866
7867 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7868 check whether we are replacing a 32-bit instruction. */
7869 if (tdep->thumb2_breakpoint != NULL)
7870 {
7871 gdb_byte buf[2];
7872 if (target_read_memory (*pcptr, buf, 2) == 0)
7873 {
7874 unsigned short inst1;
7875 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7876 if (thumb_insn_size (inst1) == 4)
7877 {
7878 *lenptr = tdep->thumb2_breakpoint_size;
7879 return tdep->thumb2_breakpoint;
7880 }
7881 }
7882 }
7883
7884 *lenptr = tdep->thumb_breakpoint_size;
7885 return tdep->thumb_breakpoint;
7886 }
7887 else
7888 {
7889 *lenptr = tdep->arm_breakpoint_size;
7890 return tdep->arm_breakpoint;
7891 }
7892 }
7893
/* Like arm_breakpoint_from_pc, but reports a breakpoint "kind" number
   for the remote stub rather than the literal breakpoint bytes; the
   length computed by arm_breakpoint_from_pc doubles as the kind.  */

static void
arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
			       int *kindptr)
{
  arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);

  if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
    /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
       that this is not confused with a 32-bit ARM breakpoint.  */
    *kindptr = ARM_BP_KIND_THUMB2;
}
7905
/* Extract from an array REGBUF containing the (raw) register state a
   function return value of type TYPE, and copy that, in virtual
   format, into VALBUF.  */

static void
arm_extract_return_value (struct type *type, struct regcache *regs,
			  gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = get_regcache_arch (regs);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE_FLT == TYPE_CODE (type))
    {
      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  {
	    /* The value is in register F0 in internal format.  We need to
	       extract the raw value and then convert it to the desired
	       internal type.  */
	    bfd_byte tmpbuf[FP_REGISTER_SIZE];

	    regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
	    convert_from_extended (floatformat_from_type (type), tmpbuf,
				   valbuf, gdbarch_byte_order (gdbarch));
	  }
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* The value comes back in the core registers: a1, and also the
	     next register when it is wider than 4 bytes.  */
	  regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
				  valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_extract_return_value: "
			    "Floating point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_CODE (type) == TYPE_CODE_REF
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      /* If the type is a plain integer, then the access is
	 straight-forward.  Otherwise we have to play around a bit
	 more.  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      ULONGEST tmp;

      while (len > 0)
	{
	  /* By using store_unsigned_integer we avoid having to do
	     anything special for small big-endian values.  */
	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
	  store_unsigned_integer (valbuf,
				  (len > INT_REGISTER_SIZE
				   ? INT_REGISTER_SIZE : len),
				  byte_order, tmp);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  regcache_cooked_read (regs, regno++, tmpbuf);
	  memcpy (valbuf, tmpbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
7998
7999
/* Will a function return an aggregate type in memory or in a
   register?  Return 0 if an aggregate type can be returned in a
   register, 1 if it must be returned in memory.  */

static int
arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
{
  enum type_code code;

  type = check_typedef (type);

  /* Simple, non-aggregate types (ie not including vectors and
     complex) are always returned in a register (or registers).  */
  code = TYPE_CODE (type);
  if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
      && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
    return 0;

  if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
    {
      /* Vector values should be returned using ARM registers if they
	 are not over 16 bytes.  */
      return (TYPE_LENGTH (type) > 16);
    }

  if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
    {
      /* The AAPCS says all aggregates not larger than a word are returned
	 in a register.  */
      if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
	return 0;

      return 1;
    }
  else
    {
      /* APCS rules below.  nRc is the result: nonzero means "must be
	 returned in memory".  */
      int nRc;

      /* All aggregate types that won't fit in a register must be returned
	 in memory.  */
      if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
	return 1;

      /* In the ARM ABI, "integer" like aggregate types are returned in
	 registers.  For an aggregate type to be integer like, its size
	 must be less than or equal to INT_REGISTER_SIZE and the
	 offset of each addressable subfield must be zero.  Note that bit
	 fields are not addressable, and all addressable subfields of
	 unions always start at offset zero.

	 This function is based on the behaviour of GCC 2.95.1.
	 See: gcc/arm.c: arm_return_in_memory() for details.

	 Note: All versions of GCC before GCC 2.95.2 do not set up the
	 parameters correctly for a function returning the following
	 structure: struct { float f;}; This should be returned in memory,
	 not a register.  Richard Earnshaw sent me a patch, but I do not
	 know of any way to detect if a function like the above has been
	 compiled with the correct calling convention.  */

      /* Assume all other aggregate types can be returned in a register.
	 Run a check for structures, unions and arrays.  */
      nRc = 0;

      if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
	{
	  int i;
	  /* Need to check if this struct/union is "integer" like.  For
	     this to be true, its size must be less than or equal to
	     INT_REGISTER_SIZE and the offset of each addressable
	     subfield must be zero.  Note that bit fields are not
	     addressable, and unions always start at offset zero.  If any
	     of the subfields is a floating point type, the struct/union
	     cannot be an integer type.  */

	  /* For each field in the object, check:
	     1) Is it FP? --> yes, nRc = 1;
	     2) Is it addressable (bitpos != 0) and
	     not packed (bitsize == 0)?
	     --> yes, nRc = 1
	  */

	  for (i = 0; i < TYPE_NFIELDS (type); i++)
	    {
	      enum type_code field_type_code;

	      field_type_code
		= TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
							     i)));

	      /* Is it a floating point type field?  */
	      if (field_type_code == TYPE_CODE_FLT)
		{
		  nRc = 1;
		  break;
		}

	      /* If bitpos != 0, then we have to care about it.  */
	      if (TYPE_FIELD_BITPOS (type, i) != 0)
		{
		  /* Bitfields are not addressable.  If the field bitsize is
		     zero, then the field is not packed.  Hence it cannot be
		     a bitfield or any other packed type.  */
		  if (TYPE_FIELD_BITSIZE (type, i) == 0)
		    {
		      nRc = 1;
		      break;
		    }
		}
	    }
	}

      return nRc;
    }
}
8115
/* Write into appropriate registers a function return value of type
   TYPE, given in virtual format.  This handles the core-register
   (and FPA) return conventions; VFP-register returns are handled in
   arm_return_value before this is reached.  */

static void
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = get_regcache_arch (regs);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE (type) == TYPE_CODE_FLT)
    {
      gdb_byte buf[MAX_REGISTER_SIZE];

      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  /* FPA returns floats in F0, in the FPA extended format;
	     convert from the value's own format first.  */
	  convert_to_extended (floatformat_from_type (type), buf, valbuf,
			       gdbarch_byte_order (gdbarch));
	  regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Soft-float return: the value goes in r0, and r1 as well
	     for values wider than one word (e.g. double).  */
	  regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
				   valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_CODE (type) == TYPE_CODE_REF
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
	{
	  /* Values of one word or less are zero/sign-extended and
	     returned in r0.  */
	  bfd_byte tmpbuf[INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);

	  store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
	  regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
	}
      else
	{
	  /* Integral values greater than one word are stored in consecutive
	     registers starting with r0.  This will always be a multiple of
	     the register size.  */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;

	  while (len > 0)
	    {
	      regcache_cooked_write (regs, regno++, valbuf);
	      len -= INT_REGISTER_SIZE;
	      valbuf += INT_REGISTER_SIZE;
	    }
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  /* A trailing partial word is staged through TMPBUF so we
	     never read past the end of VALBUF.  */
	  memcpy (tmpbuf, valbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  regcache_cooked_write (regs, regno++, tmpbuf);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
8209
8210
/* Handle function return values.  Implements the return_value gdbarch
   method: decide between register and struct (memory) conventions,
   and read/write the value via READBUF/WRITEBUF when requested.  */

static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
		  struct type *valtype, struct regcache *regcache,
		  gdb_byte *readbuf, const gdb_byte *writebuf)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  int vfp_base_count;

  /* VFP co-processor register candidates are returned in s/d/q
     registers when the function follows the VFP variant of AAPCS.  */
  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
    {
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      int i;
      for (i = 0; i < vfp_base_count; i++)
	{
	  if (reg_char == 'q')
	    {
	      /* Quad registers have no raw register; access them via
		 their two underlying double registers.  */
	      if (writebuf)
		arm_neon_quad_write (gdbarch, regcache, i,
				     writebuf + i * unit_length);

	      if (readbuf)
		arm_neon_quad_read (gdbarch, regcache, i,
				    readbuf + i * unit_length);
	    }
	  else
	    {
	      char name_buf[4];
	      int regnum;

	      /* Map the register name ("s<i>" or "d<i>") back to a
		 register number.  */
	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						    strlen (name_buf));
	      if (writebuf)
		regcache_cooked_write (regcache, regnum,
				       writebuf + i * unit_length);
	      if (readbuf)
		regcache_cooked_read (regcache, regnum,
				      readbuf + i * unit_length);
	    }
	}
      return RETURN_VALUE_REGISTER_CONVENTION;
    }

  if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
      || TYPE_CODE (valtype) == TYPE_CODE_UNION
      || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
    {
      /* Aggregates may be returned in memory, depending on the ABI
	 in effect and the type's size and layout.  */
      if (tdep->struct_return == pcc_struct_return
	  || arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }
  else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
    {
      if (arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }

  /* Register convention: transfer the value through the core
     registers.  */
  if (writebuf)
    arm_store_return_value (valtype, regcache, writebuf);

  if (readbuf)
    arm_extract_return_value (valtype, regcache, readbuf);

  return RETURN_VALUE_REGISTER_CONVENTION;
}
8282
8283
8284 static int
8285 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8286 {
8287 struct gdbarch *gdbarch = get_frame_arch (frame);
8288 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8289 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8290 CORE_ADDR jb_addr;
8291 gdb_byte buf[INT_REGISTER_SIZE];
8292
8293 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8294
8295 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8296 INT_REGISTER_SIZE))
8297 return 0;
8298
8299 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8300 return 1;
8301 }
8302
8303 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8304 return the target PC. Otherwise return 0. */
8305
8306 CORE_ADDR
8307 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8308 {
8309 const char *name;
8310 int namelen;
8311 CORE_ADDR start_addr;
8312
8313 /* Find the starting address and name of the function containing the PC. */
8314 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8315 {
8316 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8317 check here. */
8318 start_addr = arm_skip_bx_reg (frame, pc);
8319 if (start_addr != 0)
8320 return start_addr;
8321
8322 return 0;
8323 }
8324
8325 /* If PC is in a Thumb call or return stub, return the address of the
8326 target PC, which is in a register. The thunk functions are called
8327 _call_via_xx, where x is the register name. The possible names
8328 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8329 functions, named __ARM_call_via_r[0-7]. */
8330 if (startswith (name, "_call_via_")
8331 || startswith (name, "__ARM_call_via_"))
8332 {
8333 /* Use the name suffix to determine which register contains the
8334 target PC. */
8335 static char *table[15] =
8336 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8337 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8338 };
8339 int regno;
8340 int offset = strlen (name) - 2;
8341
8342 for (regno = 0; regno <= 14; regno++)
8343 if (strcmp (&name[offset], table[regno]) == 0)
8344 return get_frame_register_unsigned (frame, regno);
8345 }
8346
8347 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8348 non-interworking calls to foo. We could decode the stubs
8349 to find the target but it's easier to use the symbol table. */
8350 namelen = strlen (name);
8351 if (name[0] == '_' && name[1] == '_'
8352 && ((namelen > 2 + strlen ("_from_thumb")
8353 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8354 || (namelen > 2 + strlen ("_from_arm")
8355 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8356 {
8357 char *target_name;
8358 int target_len = namelen - 2;
8359 struct bound_minimal_symbol minsym;
8360 struct objfile *objfile;
8361 struct obj_section *sec;
8362
8363 if (name[namelen - 1] == 'b')
8364 target_len -= strlen ("_from_thumb");
8365 else
8366 target_len -= strlen ("_from_arm");
8367
8368 target_name = (char *) alloca (target_len + 1);
8369 memcpy (target_name, name + 2, target_len);
8370 target_name[target_len] = '\0';
8371
8372 sec = find_pc_section (pc);
8373 objfile = (sec == NULL) ? NULL : sec->objfile;
8374 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8375 if (minsym.minsym != NULL)
8376 return BMSYMBOL_VALUE_ADDRESS (minsym);
8377 else
8378 return 0;
8379 }
8380
8381 return 0; /* not a stub */
8382 }
8383
8384 static void
8385 set_arm_command (char *args, int from_tty)
8386 {
8387 printf_unfiltered (_("\
8388 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8389 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8390 }
8391
/* Top-level "show arm" command handler: display the values of all
   "show arm" subcommands.  */

static void
show_arm_command (char *args, int from_tty)
{
  cmd_show_list (showarmcmdlist, from_tty, "");
}
8397
8398 static void
8399 arm_update_current_architecture (void)
8400 {
8401 struct gdbarch_info info;
8402
8403 /* If the current architecture is not ARM, we have nothing to do. */
8404 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8405 return;
8406
8407 /* Update the architecture. */
8408 gdbarch_info_init (&info);
8409
8410 if (!gdbarch_update_p (info))
8411 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8412 }
8413
8414 static void
8415 set_fp_model_sfunc (char *args, int from_tty,
8416 struct cmd_list_element *c)
8417 {
8418 int fp_model;
8419
8420 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8421 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8422 {
8423 arm_fp_model = (enum arm_float_model) fp_model;
8424 break;
8425 }
8426
8427 if (fp_model == ARM_FLOAT_LAST)
8428 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8429 current_fp_model);
8430
8431 arm_update_current_architecture ();
8432 }
8433
/* Handler for "show arm fpu": report the selected floating point
   model, resolving "auto" to the model in effect for the current
   architecture.  */

static void
show_fp_model (struct ui_file *file, int from_tty,
	       struct cmd_list_element *c, const char *value)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());

  if (arm_fp_model == ARM_FLOAT_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
		      fp_model_strings[tdep->fp_model]);
  else
    fprintf_filtered (file, _("\
The current ARM floating point model is \"%s\".\n"),
		      fp_model_strings[arm_fp_model]);
}
8450
8451 static void
8452 arm_set_abi (char *args, int from_tty,
8453 struct cmd_list_element *c)
8454 {
8455 int arm_abi;
8456
8457 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8458 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8459 {
8460 arm_abi_global = (enum arm_abi_kind) arm_abi;
8461 break;
8462 }
8463
8464 if (arm_abi == ARM_ABI_LAST)
8465 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8466 arm_abi_string);
8467
8468 arm_update_current_architecture ();
8469 }
8470
/* Handler for "show arm abi": report the selected ABI, resolving
   "auto" to the ABI in effect for the current architecture.  */

static void
arm_show_abi (struct ui_file *file, int from_tty,
	      struct cmd_list_element *c, const char *value)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());

  if (arm_abi_global == ARM_ABI_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM ABI is \"auto\" (currently \"%s\").\n"),
		      arm_abi_strings[tdep->arm_abi]);
  else
    fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
		      arm_abi_string);
}
8486
/* Handler for "show arm fallback-mode": report the execution mode
   (ARM or Thumb) assumed when symbol information is unavailable.  */

static void
arm_show_fallback_mode (struct ui_file *file, int from_tty,
			struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(when symbols are unavailable) is \"%s\".\n"),
		    arm_fallback_mode_string);
}
8496
/* Handler for "show arm force-mode": report the execution mode
   (ARM or Thumb) assumed even when symbol information is present.  */

static void
arm_show_force_mode (struct ui_file *file, int from_tty,
		     struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(even when symbols are available) is \"%s\".\n"),
		    arm_force_mode_string);
}
8506
/* If the user changes the register disassembly style used for info
   register and other commands, we have to also switch the style used
   in opcodes for disassembly output.  This function is run in the "set
   arm disassembly" command, and does that.  */

static void
set_disassembly_style_sfunc (char *args, int from_tty,
			     struct cmd_list_element *c)
{
  /* Propagate the newly selected style to the opcodes disassembler.  */
  set_disassembly_style ();
}
8518 \f
8519 /* Return the ARM register name corresponding to register I. */
8520 static const char *
8521 arm_register_name (struct gdbarch *gdbarch, int i)
8522 {
8523 const int num_regs = gdbarch_num_regs (gdbarch);
8524
8525 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8526 && i >= num_regs && i < num_regs + 32)
8527 {
8528 static const char *const vfp_pseudo_names[] = {
8529 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8530 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8531 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8532 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8533 };
8534
8535 return vfp_pseudo_names[i - num_regs];
8536 }
8537
8538 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8539 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8540 {
8541 static const char *const neon_pseudo_names[] = {
8542 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8543 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8544 };
8545
8546 return neon_pseudo_names[i - num_regs - 32];
8547 }
8548
8549 if (i >= ARRAY_SIZE (arm_register_names))
8550 /* These registers are only supported on targets which supply
8551 an XML description. */
8552 return "";
8553
8554 return arm_register_names[i];
8555 }
8556
8557 static void
8558 set_disassembly_style (void)
8559 {
8560 int current;
8561
8562 /* Find the style that the user wants. */
8563 for (current = 0; current < num_disassembly_options; current++)
8564 if (disassembly_style == valid_disassembly_styles[current])
8565 break;
8566 gdb_assert (current < num_disassembly_options);
8567
8568 /* Synchronize the disassembler. */
8569 set_arm_regname_option (current);
8570 }
8571
8572 /* Test whether the coff symbol specific value corresponds to a Thumb
8573 function. */
8574
8575 static int
8576 coff_sym_is_thumb (int val)
8577 {
8578 return (val == C_THUMBEXT
8579 || val == C_THUMBSTAT
8580 || val == C_THUMBEXTFUNC
8581 || val == C_THUMBSTATFUNC
8582 || val == C_THUMBLABEL);
8583 }
8584
8585 /* arm_coff_make_msymbol_special()
8586 arm_elf_make_msymbol_special()
8587
8588 These functions test whether the COFF or ELF symbol corresponds to
8589 an address in thumb code, and set a "special" bit in a minimal
8590 symbol to indicate that it does. */
8591
8592 static void
8593 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8594 {
8595 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8596
8597 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8598 == ST_BRANCH_TO_THUMB)
8599 MSYMBOL_SET_SPECIAL (msym);
8600 }
8601
static void
arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
{
  /* Only symbols with a Thumb COFF storage class are special.  */
  if (!coff_sym_is_thumb (val))
    return;

  MSYMBOL_SET_SPECIAL (msym);
}
8608
8609 static void
8610 arm_objfile_data_free (struct objfile *objfile, void *arg)
8611 {
8612 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8613 unsigned int i;
8614
8615 for (i = 0; i < objfile->obfd->section_count; i++)
8616 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8617 }
8618
/* Record an ARM mapping symbol ($a, $t or $d) for OBJFILE, keeping
   each section's vector sorted by address so later lookups can
   search it efficiently.  Other '$'-prefixed symbols are ignored.  */

static void
arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
			   asymbol *sym)
{
  const char *name = bfd_asymbol_name (sym);
  struct arm_per_objfile *data;
  VEC(arm_mapping_symbol_s) **map_p;
  struct arm_mapping_symbol new_map_sym;

  gdb_assert (name[0] == '$');
  if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
    return;

  /* Lazily create the per-objfile data the first time a mapping
     symbol is seen for this objfile.  */
  data = (struct arm_per_objfile *) objfile_data (objfile,
						  arm_objfile_data_key);
  if (data == NULL)
    {
      data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
			     struct arm_per_objfile);
      set_objfile_data (objfile, arm_objfile_data_key, data);
      data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
					   objfile->obfd->section_count,
					   VEC(arm_mapping_symbol_s) *);
    }
  map_p = &data->section_maps[bfd_get_section (sym)->index];

  new_map_sym.value = sym->value;
  new_map_sym.type = name[1];

  /* Assume that most mapping symbols appear in order of increasing
     value.  If they were randomly distributed, it would be faster to
     always push here and then sort at first use.  */
  if (!VEC_empty (arm_mapping_symbol_s, *map_p))
    {
      struct arm_mapping_symbol *prev_map_sym;

      prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
      if (prev_map_sym->value >= sym->value)
	{
	  /* Out-of-order symbol: insert at its sorted position
	     instead of appending.  */
	  unsigned int idx;
	  idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
				 arm_compare_mapping_symbols);
	  VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
	  return;
	}
    }

  /* Common case: the new symbol extends the sorted vector.  */
  VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
}
8668
8669 static void
8670 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8671 {
8672 struct gdbarch *gdbarch = get_regcache_arch (regcache);
8673 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8674
8675 /* If necessary, set the T bit. */
8676 if (arm_apcs_32)
8677 {
8678 ULONGEST val, t_bit;
8679 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8680 t_bit = arm_psr_thumb_bit (gdbarch);
8681 if (arm_pc_is_thumb (gdbarch, pc))
8682 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8683 val | t_bit);
8684 else
8685 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8686 val & ~t_bit);
8687 }
8688 }
8689
8690 /* Read the contents of a NEON quad register, by reading from two
8691 double registers. This is used to implement the quad pseudo
8692 registers, and for argument passing in case the quad registers are
8693 missing; vectors are passed in quad registers when using the VFP
8694 ABI, even if a NEON unit is not present. REGNUM is the index of
8695 the quad register, in [0, 15]. */
8696
8697 static enum register_status
8698 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8699 int regnum, gdb_byte *buf)
8700 {
8701 char name_buf[4];
8702 gdb_byte reg_buf[8];
8703 int offset, double_regnum;
8704 enum register_status status;
8705
8706 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8707 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8708 strlen (name_buf));
8709
8710 /* d0 is always the least significant half of q0. */
8711 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8712 offset = 8;
8713 else
8714 offset = 0;
8715
8716 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8717 if (status != REG_VALID)
8718 return status;
8719 memcpy (buf + offset, reg_buf, 8);
8720
8721 offset = 8 - offset;
8722 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8723 if (status != REG_VALID)
8724 return status;
8725 memcpy (buf + offset, reg_buf, 8);
8726
8727 return REG_VALID;
8728 }
8729
8730 static enum register_status
8731 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8732 int regnum, gdb_byte *buf)
8733 {
8734 const int num_regs = gdbarch_num_regs (gdbarch);
8735 char name_buf[4];
8736 gdb_byte reg_buf[8];
8737 int offset, double_regnum;
8738
8739 gdb_assert (regnum >= num_regs);
8740 regnum -= num_regs;
8741
8742 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8743 /* Quad-precision register. */
8744 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8745 else
8746 {
8747 enum register_status status;
8748
8749 /* Single-precision register. */
8750 gdb_assert (regnum < 32);
8751
8752 /* s0 is always the least significant half of d0. */
8753 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8754 offset = (regnum & 1) ? 0 : 4;
8755 else
8756 offset = (regnum & 1) ? 4 : 0;
8757
8758 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8759 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8760 strlen (name_buf));
8761
8762 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8763 if (status == REG_VALID)
8764 memcpy (buf, reg_buf + offset, 4);
8765 return status;
8766 }
8767 }
8768
8769 /* Store the contents of BUF to a NEON quad register, by writing to
8770 two double registers. This is used to implement the quad pseudo
8771 registers, and for argument passing in case the quad registers are
8772 missing; vectors are passed in quad registers when using the VFP
8773 ABI, even if a NEON unit is not present. REGNUM is the index
8774 of the quad register, in [0, 15]. */
8775
8776 static void
8777 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8778 int regnum, const gdb_byte *buf)
8779 {
8780 char name_buf[4];
8781 int offset, double_regnum;
8782
8783 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8784 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8785 strlen (name_buf));
8786
8787 /* d0 is always the least significant half of q0. */
8788 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8789 offset = 8;
8790 else
8791 offset = 0;
8792
8793 regcache_raw_write (regcache, double_regnum, buf + offset);
8794 offset = 8 - offset;
8795 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8796 }
8797
/* Implement the pseudo_register_write gdbarch method: store BUF to
   the VFP single pseudo sN (as half of the raw double d(N/2)), or
   delegate to arm_neon_quad_write for the quad pseudos.  */

static void
arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
		  int regnum, const gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
  else
    {
      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      /* Map the containing double register's name to its raw number.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      /* Read-modify-write: fetch the whole double register, splice in
	 the four bytes, and write it back.  NOTE(review): the raw-read
	 status is not checked here; if the double register were
	 unavailable the untouched half would be garbage — confirm
	 this is acceptable for all callers.  */
      regcache_raw_read (regcache, double_regnum, reg_buf);
      memcpy (reg_buf + offset, buf, 4);
      regcache_raw_write (regcache, double_regnum, reg_buf);
    }
}
8833
/* Return the value of the user register whose number BATON points
   at, in the context of FRAME.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  int regnum = *(const int *) baton;

  return value_of_register (regnum, frame);
}
8840 \f
8841 static enum gdb_osabi
8842 arm_elf_osabi_sniffer (bfd *abfd)
8843 {
8844 unsigned int elfosabi;
8845 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8846
8847 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8848
8849 if (elfosabi == ELFOSABI_ARM)
8850 /* GNU tools use this value. Check note sections in this case,
8851 as well. */
8852 bfd_map_over_sections (abfd,
8853 generic_elf_osabi_sniff_abi_tag_sections,
8854 &osabi);
8855
8856 /* Anything else will be handled by the generic ELF sniffer. */
8857 return osabi;
8858 }
8859
8860 static int
8861 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8862 struct reggroup *group)
8863 {
8864 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8865 this, FPS register belongs to save_regroup, restore_reggroup, and
8866 all_reggroup, of course. */
8867 if (regnum == ARM_FPS_REGNUM)
8868 return (group == float_reggroup
8869 || group == save_reggroup
8870 || group == restore_reggroup
8871 || group == all_reggroup);
8872 else
8873 return default_register_reggroup_p (gdbarch, regnum, group);
8874 }
8875
8876 \f
8877 /* For backward-compatibility we allow two 'g' packet lengths with
8878 the remote protocol depending on whether FPA registers are
8879 supplied. M-profile targets do not have FPA registers, but some
8880 stubs already exist in the wild which use a 'g' packet which
8881 supplies them albeit with dummy values. The packet format which
8882 includes FPA registers should be considered deprecated for
8883 M-profile targets. */
8884
8885 static void
8886 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8887 {
8888 if (gdbarch_tdep (gdbarch)->is_m)
8889 {
8890 /* If we know from the executable this is an M-profile target,
8891 cater for remote targets whose register set layout is the
8892 same as the FPA layout. */
8893 register_remote_g_packet_guess (gdbarch,
8894 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8895 (16 * INT_REGISTER_SIZE)
8896 + (8 * FP_REGISTER_SIZE)
8897 + (2 * INT_REGISTER_SIZE),
8898 tdesc_arm_with_m_fpa_layout);
8899
8900 /* The regular M-profile layout. */
8901 register_remote_g_packet_guess (gdbarch,
8902 /* r0-r12,sp,lr,pc; xpsr */
8903 (16 * INT_REGISTER_SIZE)
8904 + INT_REGISTER_SIZE,
8905 tdesc_arm_with_m);
8906
8907 /* M-profile plus M4F VFP. */
8908 register_remote_g_packet_guess (gdbarch,
8909 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8910 (16 * INT_REGISTER_SIZE)
8911 + (16 * VFP_REGISTER_SIZE)
8912 + (2 * INT_REGISTER_SIZE),
8913 tdesc_arm_with_m_vfp_d16);
8914 }
8915
8916 /* Otherwise we don't have a useful guess. */
8917 }
8918
8919 /* Implement the code_of_frame_writable gdbarch method. */
8920
8921 static int
8922 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8923 {
8924 if (gdbarch_tdep (gdbarch)->is_m
8925 && get_frame_type (frame) == SIGTRAMP_FRAME)
8926 {
8927 /* M-profile exception frames return to some magic PCs, where
8928 isn't writable at all. */
8929 return 0;
8930 }
8931 else
8932 return 1;
8933 }
8934
8935 \f
8936 /* Initialize the current architecture based on INFO. If possible,
8937 re-use an architecture from ARCHES, which is a list of
8938 architectures already created during this debugging session.
8939
8940 Called e.g. at program startup, when reading a core file, and when
8941 reading a binary file. */
8942
8943 static struct gdbarch *
8944 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8945 {
8946 struct gdbarch_tdep *tdep;
8947 struct gdbarch *gdbarch;
8948 struct gdbarch_list *best_arch;
8949 enum arm_abi_kind arm_abi = arm_abi_global;
8950 enum arm_float_model fp_model = arm_fp_model;
8951 struct tdesc_arch_data *tdesc_data = NULL;
8952 int i, is_m = 0;
8953 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8954 int have_wmmx_registers = 0;
8955 int have_neon = 0;
8956 int have_fpa_registers = 1;
8957 const struct target_desc *tdesc = info.target_desc;
8958
8959 /* If we have an object to base this architecture on, try to determine
8960 its ABI. */
8961
8962 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8963 {
8964 int ei_osabi, e_flags;
8965
8966 switch (bfd_get_flavour (info.abfd))
8967 {
8968 case bfd_target_aout_flavour:
8969 /* Assume it's an old APCS-style ABI. */
8970 arm_abi = ARM_ABI_APCS;
8971 break;
8972
8973 case bfd_target_coff_flavour:
8974 /* Assume it's an old APCS-style ABI. */
8975 /* XXX WinCE? */
8976 arm_abi = ARM_ABI_APCS;
8977 break;
8978
8979 case bfd_target_elf_flavour:
8980 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8981 e_flags = elf_elfheader (info.abfd)->e_flags;
8982
8983 if (ei_osabi == ELFOSABI_ARM)
8984 {
8985 /* GNU tools used to use this value, but do not for EABI
8986 objects. There's nowhere to tag an EABI version
8987 anyway, so assume APCS. */
8988 arm_abi = ARM_ABI_APCS;
8989 }
8990 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8991 {
8992 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8993 int attr_arch, attr_profile;
8994
8995 switch (eabi_ver)
8996 {
8997 case EF_ARM_EABI_UNKNOWN:
8998 /* Assume GNU tools. */
8999 arm_abi = ARM_ABI_APCS;
9000 break;
9001
9002 case EF_ARM_EABI_VER4:
9003 case EF_ARM_EABI_VER5:
9004 arm_abi = ARM_ABI_AAPCS;
9005 /* EABI binaries default to VFP float ordering.
9006 They may also contain build attributes that can
9007 be used to identify if the VFP argument-passing
9008 ABI is in use. */
9009 if (fp_model == ARM_FLOAT_AUTO)
9010 {
9011 #ifdef HAVE_ELF
9012 switch (bfd_elf_get_obj_attr_int (info.abfd,
9013 OBJ_ATTR_PROC,
9014 Tag_ABI_VFP_args))
9015 {
9016 case AEABI_VFP_args_base:
9017 /* "The user intended FP parameter/result
9018 passing to conform to AAPCS, base
9019 variant". */
9020 fp_model = ARM_FLOAT_SOFT_VFP;
9021 break;
9022 case AEABI_VFP_args_vfp:
9023 /* "The user intended FP parameter/result
9024 passing to conform to AAPCS, VFP
9025 variant". */
9026 fp_model = ARM_FLOAT_VFP;
9027 break;
9028 case AEABI_VFP_args_toolchain:
9029 /* "The user intended FP parameter/result
9030 passing to conform to tool chain-specific
9031 conventions" - we don't know any such
9032 conventions, so leave it as "auto". */
9033 break;
9034 case AEABI_VFP_args_compatible:
9035 /* "Code is compatible with both the base
9036 and VFP variants; the user did not permit
9037 non-variadic functions to pass FP
9038 parameters/results" - leave it as
9039 "auto". */
9040 break;
9041 default:
9042 /* Attribute value not mentioned in the
9043 November 2012 ABI, so leave it as
9044 "auto". */
9045 break;
9046 }
9047 #else
9048 fp_model = ARM_FLOAT_SOFT_VFP;
9049 #endif
9050 }
9051 break;
9052
9053 default:
9054 /* Leave it as "auto". */
9055 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9056 break;
9057 }
9058
9059 #ifdef HAVE_ELF
9060 /* Detect M-profile programs. This only works if the
9061 executable file includes build attributes; GCC does
9062 copy them to the executable, but e.g. RealView does
9063 not. */
9064 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9065 Tag_CPU_arch);
9066 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9067 OBJ_ATTR_PROC,
9068 Tag_CPU_arch_profile);
9069 /* GCC specifies the profile for v6-M; RealView only
9070 specifies the profile for architectures starting with
9071 V7 (as opposed to architectures with a tag
9072 numerically greater than TAG_CPU_ARCH_V7). */
9073 if (!tdesc_has_registers (tdesc)
9074 && (attr_arch == TAG_CPU_ARCH_V6_M
9075 || attr_arch == TAG_CPU_ARCH_V6S_M
9076 || attr_profile == 'M'))
9077 is_m = 1;
9078 #endif
9079 }
9080
9081 if (fp_model == ARM_FLOAT_AUTO)
9082 {
9083 int e_flags = elf_elfheader (info.abfd)->e_flags;
9084
9085 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9086 {
9087 case 0:
9088 /* Leave it as "auto". Strictly speaking this case
9089 means FPA, but almost nobody uses that now, and
9090 many toolchains fail to set the appropriate bits
9091 for the floating-point model they use. */
9092 break;
9093 case EF_ARM_SOFT_FLOAT:
9094 fp_model = ARM_FLOAT_SOFT_FPA;
9095 break;
9096 case EF_ARM_VFP_FLOAT:
9097 fp_model = ARM_FLOAT_VFP;
9098 break;
9099 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9100 fp_model = ARM_FLOAT_SOFT_VFP;
9101 break;
9102 }
9103 }
9104
9105 if (e_flags & EF_ARM_BE8)
9106 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9107
9108 break;
9109
9110 default:
9111 /* Leave it as "auto". */
9112 break;
9113 }
9114 }
9115
9116 /* Check any target description for validity. */
9117 if (tdesc_has_registers (tdesc))
9118 {
9119 /* For most registers we require GDB's default names; but also allow
9120 the numeric names for sp / lr / pc, as a convenience. */
9121 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9122 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9123 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9124
9125 const struct tdesc_feature *feature;
9126 int valid_p;
9127
9128 feature = tdesc_find_feature (tdesc,
9129 "org.gnu.gdb.arm.core");
9130 if (feature == NULL)
9131 {
9132 feature = tdesc_find_feature (tdesc,
9133 "org.gnu.gdb.arm.m-profile");
9134 if (feature == NULL)
9135 return NULL;
9136 else
9137 is_m = 1;
9138 }
9139
9140 tdesc_data = tdesc_data_alloc ();
9141
9142 valid_p = 1;
9143 for (i = 0; i < ARM_SP_REGNUM; i++)
9144 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9145 arm_register_names[i]);
9146 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9147 ARM_SP_REGNUM,
9148 arm_sp_names);
9149 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9150 ARM_LR_REGNUM,
9151 arm_lr_names);
9152 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9153 ARM_PC_REGNUM,
9154 arm_pc_names);
9155 if (is_m)
9156 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9157 ARM_PS_REGNUM, "xpsr");
9158 else
9159 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9160 ARM_PS_REGNUM, "cpsr");
9161
9162 if (!valid_p)
9163 {
9164 tdesc_data_cleanup (tdesc_data);
9165 return NULL;
9166 }
9167
9168 feature = tdesc_find_feature (tdesc,
9169 "org.gnu.gdb.arm.fpa");
9170 if (feature != NULL)
9171 {
9172 valid_p = 1;
9173 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9174 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9175 arm_register_names[i]);
9176 if (!valid_p)
9177 {
9178 tdesc_data_cleanup (tdesc_data);
9179 return NULL;
9180 }
9181 }
9182 else
9183 have_fpa_registers = 0;
9184
9185 feature = tdesc_find_feature (tdesc,
9186 "org.gnu.gdb.xscale.iwmmxt");
9187 if (feature != NULL)
9188 {
9189 static const char *const iwmmxt_names[] = {
9190 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9191 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9192 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9193 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9194 };
9195
9196 valid_p = 1;
9197 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9198 valid_p
9199 &= tdesc_numbered_register (feature, tdesc_data, i,
9200 iwmmxt_names[i - ARM_WR0_REGNUM]);
9201
9202 /* Check for the control registers, but do not fail if they
9203 are missing. */
9204 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9205 tdesc_numbered_register (feature, tdesc_data, i,
9206 iwmmxt_names[i - ARM_WR0_REGNUM]);
9207
9208 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9209 valid_p
9210 &= tdesc_numbered_register (feature, tdesc_data, i,
9211 iwmmxt_names[i - ARM_WR0_REGNUM]);
9212
9213 if (!valid_p)
9214 {
9215 tdesc_data_cleanup (tdesc_data);
9216 return NULL;
9217 }
9218
9219 have_wmmx_registers = 1;
9220 }
9221
9222 /* If we have a VFP unit, check whether the single precision registers
9223 are present. If not, then we will synthesize them as pseudo
9224 registers. */
9225 feature = tdesc_find_feature (tdesc,
9226 "org.gnu.gdb.arm.vfp");
9227 if (feature != NULL)
9228 {
9229 static const char *const vfp_double_names[] = {
9230 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9231 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9232 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9233 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9234 };
9235
9236 /* Require the double precision registers. There must be either
9237 16 or 32. */
9238 valid_p = 1;
9239 for (i = 0; i < 32; i++)
9240 {
9241 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9242 ARM_D0_REGNUM + i,
9243 vfp_double_names[i]);
9244 if (!valid_p)
9245 break;
9246 }
9247 if (!valid_p && i == 16)
9248 valid_p = 1;
9249
9250 /* Also require FPSCR. */
9251 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9252 ARM_FPSCR_REGNUM, "fpscr");
9253 if (!valid_p)
9254 {
9255 tdesc_data_cleanup (tdesc_data);
9256 return NULL;
9257 }
9258
9259 if (tdesc_unnumbered_register (feature, "s0") == 0)
9260 have_vfp_pseudos = 1;
9261
9262 vfp_register_count = i;
9263
9264 /* If we have VFP, also check for NEON. The architecture allows
9265 NEON without VFP (integer vector operations only), but GDB
9266 does not support that. */
9267 feature = tdesc_find_feature (tdesc,
9268 "org.gnu.gdb.arm.neon");
9269 if (feature != NULL)
9270 {
9271 /* NEON requires 32 double-precision registers. */
9272 if (i != 32)
9273 {
9274 tdesc_data_cleanup (tdesc_data);
9275 return NULL;
9276 }
9277
9278 /* If there are quad registers defined by the stub, use
9279 their type; otherwise (normally) provide them with
9280 the default type. */
9281 if (tdesc_unnumbered_register (feature, "q0") == 0)
9282 have_neon_pseudos = 1;
9283
9284 have_neon = 1;
9285 }
9286 }
9287 }
9288
9289 /* If there is already a candidate, use it. */
9290 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9291 best_arch != NULL;
9292 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9293 {
9294 if (arm_abi != ARM_ABI_AUTO
9295 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9296 continue;
9297
9298 if (fp_model != ARM_FLOAT_AUTO
9299 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9300 continue;
9301
9302 /* There are various other properties in tdep that we do not
9303 need to check here: those derived from a target description,
9304 since gdbarches with a different target description are
9305 automatically disqualified. */
9306
9307 /* Do check is_m, though, since it might come from the binary. */
9308 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9309 continue;
9310
9311 /* Found a match. */
9312 break;
9313 }
9314
9315 if (best_arch != NULL)
9316 {
9317 if (tdesc_data != NULL)
9318 tdesc_data_cleanup (tdesc_data);
9319 return best_arch->gdbarch;
9320 }
9321
9322 tdep = XCNEW (struct gdbarch_tdep);
9323 gdbarch = gdbarch_alloc (&info, tdep);
9324
9325 /* Record additional information about the architecture we are defining.
9326 These are gdbarch discriminators, like the OSABI. */
9327 tdep->arm_abi = arm_abi;
9328 tdep->fp_model = fp_model;
9329 tdep->is_m = is_m;
9330 tdep->have_fpa_registers = have_fpa_registers;
9331 tdep->have_wmmx_registers = have_wmmx_registers;
9332 gdb_assert (vfp_register_count == 0
9333 || vfp_register_count == 16
9334 || vfp_register_count == 32);
9335 tdep->vfp_register_count = vfp_register_count;
9336 tdep->have_vfp_pseudos = have_vfp_pseudos;
9337 tdep->have_neon_pseudos = have_neon_pseudos;
9338 tdep->have_neon = have_neon;
9339
9340 arm_register_g_packet_guesses (gdbarch);
9341
9342 /* Breakpoints. */
9343 switch (info.byte_order_for_code)
9344 {
9345 case BFD_ENDIAN_BIG:
9346 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9347 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9348 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9349 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9350
9351 break;
9352
9353 case BFD_ENDIAN_LITTLE:
9354 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9355 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9356 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9357 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9358
9359 break;
9360
9361 default:
9362 internal_error (__FILE__, __LINE__,
9363 _("arm_gdbarch_init: bad byte order for float format"));
9364 }
9365
9366 /* On ARM targets char defaults to unsigned. */
9367 set_gdbarch_char_signed (gdbarch, 0);
9368
9369 /* Note: for displaced stepping, this includes the breakpoint, and one word
9370 of additional scratch space. This setting isn't used for anything beside
9371 displaced stepping at present. */
9372 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9373
9374 /* This should be low enough for everything. */
9375 tdep->lowest_pc = 0x20;
9376 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9377
9378 /* The default, for both APCS and AAPCS, is to return small
9379 structures in registers. */
9380 tdep->struct_return = reg_struct_return;
9381
9382 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9383 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9384
9385 if (is_m)
9386 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9387
9388 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9389
9390 /* Frame handling. */
9391 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9392 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9393 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9394
9395 frame_base_set_default (gdbarch, &arm_normal_base);
9396
9397 /* Address manipulation. */
9398 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9399
9400 /* Advance PC across function entry code. */
9401 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9402
9403 /* Detect whether PC is at a point where the stack has been destroyed. */
9404 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9405
9406 /* Skip trampolines. */
9407 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9408
9409 /* The stack grows downward. */
9410 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9411
9412 /* Breakpoint manipulation. */
9413 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
9414 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
9415 arm_remote_breakpoint_from_pc);
9416
9417 /* Information about registers, etc. */
9418 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9419 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9420 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9421 set_gdbarch_register_type (gdbarch, arm_register_type);
9422 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9423
9424 /* This "info float" is FPA-specific. Use the generic version if we
9425 do not have FPA. */
9426 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9427 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9428
9429 /* Internal <-> external register number maps. */
9430 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9431 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9432
9433 set_gdbarch_register_name (gdbarch, arm_register_name);
9434
9435 /* Returning results. */
9436 set_gdbarch_return_value (gdbarch, arm_return_value);
9437
9438 /* Disassembly. */
9439 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9440
9441 /* Minsymbol frobbing. */
9442 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9443 set_gdbarch_coff_make_msymbol_special (gdbarch,
9444 arm_coff_make_msymbol_special);
9445 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9446
9447 /* Thumb-2 IT block support. */
9448 set_gdbarch_adjust_breakpoint_address (gdbarch,
9449 arm_adjust_breakpoint_address);
9450
9451 /* Virtual tables. */
9452 set_gdbarch_vbit_in_delta (gdbarch, 1);
9453
9454 /* Hook in the ABI-specific overrides, if they have been registered. */
9455 gdbarch_init_osabi (info, gdbarch);
9456
9457 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9458
9459 /* Add some default predicates. */
9460 if (is_m)
9461 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9462 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9463 dwarf2_append_unwinders (gdbarch);
9464 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9465 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9466 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9467
9468 /* Now we have tuned the configuration, set a few final things,
9469 based on what the OS ABI has told us. */
9470
9471 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9472 binaries are always marked. */
9473 if (tdep->arm_abi == ARM_ABI_AUTO)
9474 tdep->arm_abi = ARM_ABI_APCS;
9475
9476 /* Watchpoints are not steppable. */
9477 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9478
9479 /* We used to default to FPA for generic ARM, but almost nobody
9480 uses that now, and we now provide a way for the user to force
9481 the model. So default to the most useful variant. */
9482 if (tdep->fp_model == ARM_FLOAT_AUTO)
9483 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9484
9485 if (tdep->jb_pc >= 0)
9486 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9487
9488 /* Floating point sizes and format. */
9489 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9490 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9491 {
9492 set_gdbarch_double_format
9493 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9494 set_gdbarch_long_double_format
9495 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9496 }
9497 else
9498 {
9499 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9500 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9501 }
9502
9503 if (have_vfp_pseudos)
9504 {
9505 /* NOTE: These are the only pseudo registers used by
9506 the ARM target at the moment. If more are added, a
9507 little more care in numbering will be needed. */
9508
9509 int num_pseudos = 32;
9510 if (have_neon_pseudos)
9511 num_pseudos += 16;
9512 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9513 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9514 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9515 }
9516
9517 if (tdesc_data)
9518 {
9519 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9520
9521 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9522
9523 /* Override tdesc_register_type to adjust the types of VFP
9524 registers for NEON. */
9525 set_gdbarch_register_type (gdbarch, arm_register_type);
9526 }
9527
9528 /* Add standard register aliases. We add aliases even for those
     names which are used by the current architecture - it's simpler,
9530 and does no harm, since nothing ever lists user registers. */
9531 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9532 user_reg_add (gdbarch, arm_register_aliases[i].name,
9533 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9534
9535 return gdbarch;
9536 }
9537
9538 static void
9539 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9540 {
9541 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9542
9543 if (tdep == NULL)
9544 return;
9545
9546 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9547 (unsigned long) tdep->lowest_pc);
9548 }
9549
9550 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9551
9552 void
9553 _initialize_arm_tdep (void)
9554 {
9555 struct ui_file *stb;
9556 long length;
9557 const char *setname;
9558 const char *setdesc;
9559 const char *const *regnames;
9560 int i;
9561 static char *helptext;
9562 char regdesc[1024], *rdptr = regdesc;
9563 size_t rest = sizeof (regdesc);
9564
9565 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9566
9567 arm_objfile_data_key
9568 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9569
9570 /* Add ourselves to objfile event chain. */
9571 observer_attach_new_objfile (arm_exidx_new_objfile);
9572 arm_exidx_data_key
9573 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9574
9575 /* Register an ELF OS ABI sniffer for ARM binaries. */
9576 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9577 bfd_target_elf_flavour,
9578 arm_elf_osabi_sniffer);
9579
9580 /* Initialize the standard target descriptions. */
9581 initialize_tdesc_arm_with_m ();
9582 initialize_tdesc_arm_with_m_fpa_layout ();
9583 initialize_tdesc_arm_with_m_vfp_d16 ();
9584 initialize_tdesc_arm_with_iwmmxt ();
9585 initialize_tdesc_arm_with_vfpv2 ();
9586 initialize_tdesc_arm_with_vfpv3 ();
9587 initialize_tdesc_arm_with_neon ();
9588
9589 /* Get the number of possible sets of register names defined in opcodes. */
9590 num_disassembly_options = get_arm_regname_num_options ();
9591
9592 /* Add root prefix command for all "set arm"/"show arm" commands. */
9593 add_prefix_cmd ("arm", no_class, set_arm_command,
9594 _("Various ARM-specific commands."),
9595 &setarmcmdlist, "set arm ", 0, &setlist);
9596
9597 add_prefix_cmd ("arm", no_class, show_arm_command,
9598 _("Various ARM-specific commands."),
9599 &showarmcmdlist, "show arm ", 0, &showlist);
9600
9601 /* Sync the opcode insn printer with our register viewer. */
9602 parse_arm_disassembler_option ("reg-names-std");
9603
9604 /* Initialize the array that will be passed to
9605 add_setshow_enum_cmd(). */
9606 valid_disassembly_styles = XNEWVEC (const char *,
9607 num_disassembly_options + 1);
9608 for (i = 0; i < num_disassembly_options; i++)
9609 {
9610 get_arm_regnames (i, &setname, &setdesc, &regnames);
9611 valid_disassembly_styles[i] = setname;
9612 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
9613 rdptr += length;
9614 rest -= length;
9615 /* When we find the default names, tell the disassembler to use
9616 them. */
9617 if (!strcmp (setname, "std"))
9618 {
9619 disassembly_style = setname;
9620 set_arm_regname_option (i);
9621 }
9622 }
9623 /* Mark the end of valid options. */
9624 valid_disassembly_styles[num_disassembly_options] = NULL;
9625
9626 /* Create the help text. */
9627 stb = mem_fileopen ();
9628 fprintf_unfiltered (stb, "%s%s%s",
9629 _("The valid values are:\n"),
9630 regdesc,
9631 _("The default is \"std\"."));
9632 helptext = ui_file_xstrdup (stb, NULL);
9633 ui_file_delete (stb);
9634
9635 add_setshow_enum_cmd("disassembler", no_class,
9636 valid_disassembly_styles, &disassembly_style,
9637 _("Set the disassembly style."),
9638 _("Show the disassembly style."),
9639 helptext,
9640 set_disassembly_style_sfunc,
9641 NULL, /* FIXME: i18n: The disassembly style is
9642 \"%s\". */
9643 &setarmcmdlist, &showarmcmdlist);
9644
9645 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9646 _("Set usage of ARM 32-bit mode."),
9647 _("Show usage of ARM 32-bit mode."),
9648 _("When off, a 26-bit PC will be used."),
9649 NULL,
9650 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9651 mode is %s. */
9652 &setarmcmdlist, &showarmcmdlist);
9653
9654 /* Add a command to allow the user to force the FPU model. */
9655 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9656 _("Set the floating point type."),
9657 _("Show the floating point type."),
9658 _("auto - Determine the FP typefrom the OS-ABI.\n\
9659 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9660 fpa - FPA co-processor (GCC compiled).\n\
9661 softvfp - Software FP with pure-endian doubles.\n\
9662 vfp - VFP co-processor."),
9663 set_fp_model_sfunc, show_fp_model,
9664 &setarmcmdlist, &showarmcmdlist);
9665
9666 /* Add a command to allow the user to force the ABI. */
9667 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9668 _("Set the ABI."),
9669 _("Show the ABI."),
9670 NULL, arm_set_abi, arm_show_abi,
9671 &setarmcmdlist, &showarmcmdlist);
9672
9673 /* Add two commands to allow the user to force the assumed
9674 execution mode. */
9675 add_setshow_enum_cmd ("fallback-mode", class_support,
9676 arm_mode_strings, &arm_fallback_mode_string,
9677 _("Set the mode assumed when symbols are unavailable."),
9678 _("Show the mode assumed when symbols are unavailable."),
9679 NULL, NULL, arm_show_fallback_mode,
9680 &setarmcmdlist, &showarmcmdlist);
9681 add_setshow_enum_cmd ("force-mode", class_support,
9682 arm_mode_strings, &arm_force_mode_string,
9683 _("Set the mode assumed even when symbols are available."),
9684 _("Show the mode assumed even when symbols are available."),
9685 NULL, NULL, arm_show_force_mode,
9686 &setarmcmdlist, &showarmcmdlist);
9687
9688 /* Debugging flag. */
9689 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9690 _("Set ARM debugging."),
9691 _("Show ARM debugging."),
9692 _("When on, arm-specific debugging is enabled."),
9693 NULL,
9694 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9695 &setdebuglist, &showdebuglist);
9696 }
9697
/* ARM-reversible process record data structures.  */

/* Encoded instruction widths, in bytes, for the three instruction
   flavours the record/replay decoder distinguishes.  */
#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20
9708
/* Allocate REGS as an array of LENGTH register numbers and fill it
   from RECORD_BUF.  No-op when LENGTH is zero.  LENGTH is evaluated
   exactly once (the previous version expanded it twice, which would
   misbehave for an argument with side effects).  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy (&REGS[0], &RECORD_BUF[0], \
                        sizeof (uint32_t) * reg_len); \
              } \
          } \
        while (0)
9720
/* Allocate MEMS as an array of LENGTH struct arm_mem_r and fill it
   from RECORD_BUF, which holds len/addr uint32_t pairs laid out the
   same way as struct arm_mem_r (two uint32_t fields, no padding).
   No-op when LENGTH is zero.  LENGTH is evaluated exactly once, and
   the memcpy destination is MEMS itself rather than the obscure
   (but same-address) &MEMS->len.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
              { \
                MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
                memcpy (MEMS, &RECORD_BUF[0], \
                        sizeof (struct arm_mem_r) * mem_len); \
              } \
          } \
        while (0)
9733
/* Nonzero when ARM_RECORD already holds at least one register or
   memory entry, i.e. the insn has already been decoded.  */
#define INSN_RECORDED(ARM_RECORD) \
        ((ARM_RECORD)->reg_rec_count != 0 || (ARM_RECORD)->mem_rec_count != 0)
9737
/* ARM memory record structure: one block of target memory that an
   instruction is about to modify, saved so that record/replay can
   restore it.  Filled in via the MEM_ALLOC macro.  */
struct arm_mem_r
{
  uint32_t len;    /* Record length.  */
  uint32_t addr;   /* Memory address.  */
};
9744
/* ARM instruction record contains opcode of current insn
   and execution state (before entry to decode_insn()),
   contains list of to-be-modified registers and
   memory blocks (on return from decode_insn()).  The arm_regs and
   arm_mems arrays are heap-allocated by the REG_ALLOC / MEM_ALLOC
   macros above; the caller owns and must free them.  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;
  struct regcache *regcache;
  CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
  uint32_t arm_insn;            /* Should accommodate thumb.  */
  uint32_t cond;                /* Condition code.  */
  uint32_t opcode;              /* Insn opcode.  */
  uint32_t decode;              /* Insn decode bits.  */
  uint32_t mem_rec_count;       /* No of mem records.  */
  uint32_t reg_rec_count;       /* No of reg records.  */
  uint32_t *arm_regs;           /* Registers to be saved for this record.  */
  struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
} insn_decode_record;
9764
9765
/* Checks ARM SBZ and SBO mandatory fields.  The field is LEN bits
   wide and starts at (1-based) bit position BIT_NUM of INSN.  SBO
   non-zero selects the should-be-one check, zero selects the
   should-be-zero check.  Returns 1 if the field is valid (or LEN is
   zero), 0 otherwise.

   NOTE(review): the previous implementation tested "ones & sbo"
   inside its loop; for SBO == 0 that test is always false, so after
   "ones = ~ones" (whose complement sets every bit outside the field
   as well) the SBZ check could never succeed.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t mask, field;

  /* An empty field is trivially valid.  */
  if (!len)
    return 1;

  /* Mask of LEN low-order one bits; guard the shift when LEN == 32,
     where "1u << 32" would be undefined behavior.  */
  mask = (len < 32) ? ((1u << len) - 1u) : 0xffffffffu;

  /* Extract the LEN-bit field starting at bit BIT_NUM - 1.  */
  field = (insn >> (bit_num - 1)) & mask;

  /* SBO: every bit must be one; SBZ: every bit must be zero.  */
  return sbo ? (field == mask) : (field == 0);
}
9789
/* Overall outcome of attempting to record one instruction.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Which miscellaneous-store variant arm_record_strx is decoding:
   halfword (STRH) or doubleword (STRD).  */
typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;

/* Instruction-set flavour of the insn being recorded.  */
typedef enum
{
  ARM_RECORD=1,
  THUMB_RECORD,
  THUMB2_RECORD
} record_type_t;
9808
9809
9810 static int
9811 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9812 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9813 {
9814
9815 struct regcache *reg_cache = arm_insn_r->regcache;
9816 ULONGEST u_regval[2]= {0};
9817
9818 uint32_t reg_src1 = 0, reg_src2 = 0;
9819 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9820
9821 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9822 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9823
9824 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9825 {
9826 /* 1) Handle misc store, immediate offset. */
9827 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9828 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9829 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9830 regcache_raw_read_unsigned (reg_cache, reg_src1,
9831 &u_regval[0]);
9832 if (ARM_PC_REGNUM == reg_src1)
9833 {
9834 /* If R15 was used as Rn, hence current PC+8. */
9835 u_regval[0] = u_regval[0] + 8;
9836 }
9837 offset_8 = (immed_high << 4) | immed_low;
9838 /* Calculate target store address. */
9839 if (14 == arm_insn_r->opcode)
9840 {
9841 tgt_mem_addr = u_regval[0] + offset_8;
9842 }
9843 else
9844 {
9845 tgt_mem_addr = u_regval[0] - offset_8;
9846 }
9847 if (ARM_RECORD_STRH == str_type)
9848 {
9849 record_buf_mem[0] = 2;
9850 record_buf_mem[1] = tgt_mem_addr;
9851 arm_insn_r->mem_rec_count = 1;
9852 }
9853 else if (ARM_RECORD_STRD == str_type)
9854 {
9855 record_buf_mem[0] = 4;
9856 record_buf_mem[1] = tgt_mem_addr;
9857 record_buf_mem[2] = 4;
9858 record_buf_mem[3] = tgt_mem_addr + 4;
9859 arm_insn_r->mem_rec_count = 2;
9860 }
9861 }
9862 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9863 {
9864 /* 2) Store, register offset. */
9865 /* Get Rm. */
9866 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9867 /* Get Rn. */
9868 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9869 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9870 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9871 if (15 == reg_src2)
9872 {
9873 /* If R15 was used as Rn, hence current PC+8. */
9874 u_regval[0] = u_regval[0] + 8;
9875 }
9876 /* Calculate target store address, Rn +/- Rm, register offset. */
9877 if (12 == arm_insn_r->opcode)
9878 {
9879 tgt_mem_addr = u_regval[0] + u_regval[1];
9880 }
9881 else
9882 {
9883 tgt_mem_addr = u_regval[1] - u_regval[0];
9884 }
9885 if (ARM_RECORD_STRH == str_type)
9886 {
9887 record_buf_mem[0] = 2;
9888 record_buf_mem[1] = tgt_mem_addr;
9889 arm_insn_r->mem_rec_count = 1;
9890 }
9891 else if (ARM_RECORD_STRD == str_type)
9892 {
9893 record_buf_mem[0] = 4;
9894 record_buf_mem[1] = tgt_mem_addr;
9895 record_buf_mem[2] = 4;
9896 record_buf_mem[3] = tgt_mem_addr + 4;
9897 arm_insn_r->mem_rec_count = 2;
9898 }
9899 }
9900 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9901 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9902 {
9903 /* 3) Store, immediate pre-indexed. */
9904 /* 5) Store, immediate post-indexed. */
9905 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9906 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9907 offset_8 = (immed_high << 4) | immed_low;
9908 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9909 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9910 /* Calculate target store address, Rn +/- Rm, register offset. */
9911 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9912 {
9913 tgt_mem_addr = u_regval[0] + offset_8;
9914 }
9915 else
9916 {
9917 tgt_mem_addr = u_regval[0] - offset_8;
9918 }
9919 if (ARM_RECORD_STRH == str_type)
9920 {
9921 record_buf_mem[0] = 2;
9922 record_buf_mem[1] = tgt_mem_addr;
9923 arm_insn_r->mem_rec_count = 1;
9924 }
9925 else if (ARM_RECORD_STRD == str_type)
9926 {
9927 record_buf_mem[0] = 4;
9928 record_buf_mem[1] = tgt_mem_addr;
9929 record_buf_mem[2] = 4;
9930 record_buf_mem[3] = tgt_mem_addr + 4;
9931 arm_insn_r->mem_rec_count = 2;
9932 }
9933 /* Record Rn also as it changes. */
9934 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9935 arm_insn_r->reg_rec_count = 1;
9936 }
9937 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9938 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9939 {
9940 /* 4) Store, register pre-indexed. */
9941 /* 6) Store, register post -indexed. */
9942 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9943 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9944 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9945 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9946 /* Calculate target store address, Rn +/- Rm, register offset. */
9947 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9948 {
9949 tgt_mem_addr = u_regval[0] + u_regval[1];
9950 }
9951 else
9952 {
9953 tgt_mem_addr = u_regval[1] - u_regval[0];
9954 }
9955 if (ARM_RECORD_STRH == str_type)
9956 {
9957 record_buf_mem[0] = 2;
9958 record_buf_mem[1] = tgt_mem_addr;
9959 arm_insn_r->mem_rec_count = 1;
9960 }
9961 else if (ARM_RECORD_STRD == str_type)
9962 {
9963 record_buf_mem[0] = 4;
9964 record_buf_mem[1] = tgt_mem_addr;
9965 record_buf_mem[2] = 4;
9966 record_buf_mem[3] = tgt_mem_addr + 4;
9967 arm_insn_r->mem_rec_count = 2;
9968 }
9969 /* Record Rn also as it changes. */
9970 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9971 arm_insn_r->reg_rec_count = 1;
9972 }
9973 return 0;
9974 }
9975
9976 /* Handling ARM extension space insns. */
9977
9978 static int
9979 arm_record_extension_space (insn_decode_record *arm_insn_r)
9980 {
9981 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
9982 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9983 uint32_t record_buf[8], record_buf_mem[8];
9984 uint32_t reg_src1 = 0;
9985 struct regcache *reg_cache = arm_insn_r->regcache;
9986 ULONGEST u_regval = 0;
9987
9988 gdb_assert (!INSN_RECORDED(arm_insn_r));
9989 /* Handle unconditional insn extension space. */
9990
9991 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9992 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9993 if (arm_insn_r->cond)
9994 {
9995 /* PLD has no affect on architectural state, it just affects
9996 the caches. */
9997 if (5 == ((opcode1 & 0xE0) >> 5))
9998 {
9999 /* BLX(1) */
10000 record_buf[0] = ARM_PS_REGNUM;
10001 record_buf[1] = ARM_LR_REGNUM;
10002 arm_insn_r->reg_rec_count = 2;
10003 }
10004 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10005 }
10006
10007
10008 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10009 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10010 {
10011 ret = -1;
10012 /* Undefined instruction on ARM V5; need to handle if later
10013 versions define it. */
10014 }
10015
10016 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10017 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10018 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10019
10020 /* Handle arithmetic insn extension space. */
10021 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10022 && !INSN_RECORDED(arm_insn_r))
10023 {
10024 /* Handle MLA(S) and MUL(S). */
10025 if (0 <= insn_op1 && 3 >= insn_op1)
10026 {
10027 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10028 record_buf[1] = ARM_PS_REGNUM;
10029 arm_insn_r->reg_rec_count = 2;
10030 }
10031 else if (4 <= insn_op1 && 15 >= insn_op1)
10032 {
10033 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10034 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10035 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10036 record_buf[2] = ARM_PS_REGNUM;
10037 arm_insn_r->reg_rec_count = 3;
10038 }
10039 }
10040
10041 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10042 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10043 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10044
10045 /* Handle control insn extension space. */
10046
10047 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10048 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10049 {
10050 if (!bit (arm_insn_r->arm_insn,25))
10051 {
10052 if (!bits (arm_insn_r->arm_insn, 4, 7))
10053 {
10054 if ((0 == insn_op1) || (2 == insn_op1))
10055 {
10056 /* MRS. */
10057 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10058 arm_insn_r->reg_rec_count = 1;
10059 }
10060 else if (1 == insn_op1)
10061 {
10062 /* CSPR is going to be changed. */
10063 record_buf[0] = ARM_PS_REGNUM;
10064 arm_insn_r->reg_rec_count = 1;
10065 }
10066 else if (3 == insn_op1)
10067 {
10068 /* SPSR is going to be changed. */
10069 /* We need to get SPSR value, which is yet to be done. */
10070 return -1;
10071 }
10072 }
10073 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10074 {
10075 if (1 == insn_op1)
10076 {
10077 /* BX. */
10078 record_buf[0] = ARM_PS_REGNUM;
10079 arm_insn_r->reg_rec_count = 1;
10080 }
10081 else if (3 == insn_op1)
10082 {
10083 /* CLZ. */
10084 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10085 arm_insn_r->reg_rec_count = 1;
10086 }
10087 }
10088 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10089 {
10090 /* BLX. */
10091 record_buf[0] = ARM_PS_REGNUM;
10092 record_buf[1] = ARM_LR_REGNUM;
10093 arm_insn_r->reg_rec_count = 2;
10094 }
10095 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10096 {
10097 /* QADD, QSUB, QDADD, QDSUB */
10098 record_buf[0] = ARM_PS_REGNUM;
10099 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10100 arm_insn_r->reg_rec_count = 2;
10101 }
10102 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10103 {
10104 /* BKPT. */
10105 record_buf[0] = ARM_PS_REGNUM;
10106 record_buf[1] = ARM_LR_REGNUM;
10107 arm_insn_r->reg_rec_count = 2;
10108
10109 /* Save SPSR also;how? */
10110 return -1;
10111 }
10112 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10113 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10114 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10115 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10116 )
10117 {
10118 if (0 == insn_op1 || 1 == insn_op1)
10119 {
10120 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10121 /* We dont do optimization for SMULW<y> where we
10122 need only Rd. */
10123 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10124 record_buf[1] = ARM_PS_REGNUM;
10125 arm_insn_r->reg_rec_count = 2;
10126 }
10127 else if (2 == insn_op1)
10128 {
10129 /* SMLAL<x><y>. */
10130 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10131 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10132 arm_insn_r->reg_rec_count = 2;
10133 }
10134 else if (3 == insn_op1)
10135 {
10136 /* SMUL<x><y>. */
10137 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10138 arm_insn_r->reg_rec_count = 1;
10139 }
10140 }
10141 }
10142 else
10143 {
10144 /* MSR : immediate form. */
10145 if (1 == insn_op1)
10146 {
10147 /* CSPR is going to be changed. */
10148 record_buf[0] = ARM_PS_REGNUM;
10149 arm_insn_r->reg_rec_count = 1;
10150 }
10151 else if (3 == insn_op1)
10152 {
10153 /* SPSR is going to be changed. */
10154 /* we need to get SPSR value, which is yet to be done */
10155 return -1;
10156 }
10157 }
10158 }
10159
10160 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10161 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10162 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10163
10164 /* Handle load/store insn extension space. */
10165
10166 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10167 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10168 && !INSN_RECORDED(arm_insn_r))
10169 {
10170 /* SWP/SWPB. */
10171 if (0 == insn_op1)
10172 {
10173 /* These insn, changes register and memory as well. */
10174 /* SWP or SWPB insn. */
10175 /* Get memory address given by Rn. */
10176 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10177 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10178 /* SWP insn ?, swaps word. */
10179 if (8 == arm_insn_r->opcode)
10180 {
10181 record_buf_mem[0] = 4;
10182 }
10183 else
10184 {
10185 /* SWPB insn, swaps only byte. */
10186 record_buf_mem[0] = 1;
10187 }
10188 record_buf_mem[1] = u_regval;
10189 arm_insn_r->mem_rec_count = 1;
10190 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10191 arm_insn_r->reg_rec_count = 1;
10192 }
10193 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10194 {
10195 /* STRH. */
10196 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10197 ARM_RECORD_STRH);
10198 }
10199 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10200 {
10201 /* LDRD. */
10202 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10203 record_buf[1] = record_buf[0] + 1;
10204 arm_insn_r->reg_rec_count = 2;
10205 }
10206 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10207 {
10208 /* STRD. */
10209 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10210 ARM_RECORD_STRD);
10211 }
10212 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10213 {
10214 /* LDRH, LDRSB, LDRSH. */
10215 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10216 arm_insn_r->reg_rec_count = 1;
10217 }
10218
10219 }
10220
10221 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10222 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10223 && !INSN_RECORDED(arm_insn_r))
10224 {
10225 ret = -1;
10226 /* Handle coprocessor insn extension space. */
10227 }
10228
10229 /* To be done for ARMv5 and later; as of now we return -1. */
10230 if (-1 == ret)
10231 return ret;
10232
10233 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10234 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10235
10236 return ret;
10237 }
10238
/* Handle ARM mode instructions with opcode 000: data-processing
   (register), multiplies, miscellaneous instructions (MSR/MRS, BX,
   BLX, CLZ, BKPT), SWP/SWPB and the extra load/store encodings.

   Records, into ARM_INSN_R, the registers and memory locations the
   instruction is about to modify, so that process record can undo it.
   Returns 0 on success, -1 when the instruction cannot be recorded
   (e.g. anything that would require reading the SPSR).

   NOTE(review): the else-if chain below is order-dependent — earlier
   arms deliberately shadow later ones for overlapping encodings.  */

static int
arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
{
  struct regcache *reg_cache = arm_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];
  ULONGEST u_regval[2] = {0};

  uint32_t reg_src1 = 0, reg_dest = 0;
  uint32_t opcode1 = 0;

  /* Cache the commonly used decode fields: bits 21-24 (the data
     processing opcode) and bits 4-7 (the "decode" field that
     distinguishes multiplies / extra load-stores / misc insns).  */
  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
  opcode1 = bits (arm_insn_r->arm_insn, 20, 24);

  /* Data processing insn /multiply insn.  */
  if (9 == arm_insn_r->decode
      && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
          || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
    {
      /* Handle multiply instructions.  */
      /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL.  */
      if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
        {
          /* Handle MLA and MUL: Rd is in bits 16-19, and the S bit
             may update the condition flags, so save CPSR too.  */
          record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
          record_buf[1] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 2;
        }
      else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
        {
          /* Handle SMLAL, SMULL, UMLAL, UMULL: 64-bit result in the
             RdHi (bits 16-19) / RdLo (bits 12-15) register pair.  */
          record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
          record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
          record_buf[2] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 3;
        }
    }
  else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
           && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
    {
      /* Handle misc load insns, as 20th bit (L = 1).  */
      /* LDR insn has a capability to do branching, if
         MOV LR, PC is preceded by LDR insn having Rn as R15
         in that case, it emulates branch and link insn, and hence we
         need to save CPSR and PC as well.  I am not sure this is right
         place; as opcode = 010 LDR insn make this happen, if R15 was
         used.  */
      reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
      if (15 != reg_dest)
        {
          record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
          arm_insn_r->reg_rec_count = 1;
        }
      else
        {
          /* Load into the PC: save PC and CPSR.  */
          record_buf[0] = reg_dest;
          record_buf[1] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 2;
        }
    }
  else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
           && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
           && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
           && 2 == bits (arm_insn_r->arm_insn, 20, 21))
    {
      /* Handle MSR insn (register form).  */
      if (9 == arm_insn_r->opcode)
        {
          /* CPSR is going to be changed.  */
          record_buf[0] = ARM_PS_REGNUM;
          arm_insn_r->reg_rec_count = 1;
        }
      else
        {
          /* SPSR is going to be changed.  */
          /* How to read SPSR value?  */
          return -1;
        }
    }
  else if (9 == arm_insn_r->decode
           && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
           && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* Handling SWP, SWPB.  */
      /* These insn, changes register and memory as well.  */
      /* SWP or SWPB insn.  */

      /* Get memory address given by Rn (bits 16-19).  */
      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      /* SWP insn ?, swaps word.  */
      if (8 == arm_insn_r->opcode)
        {
          record_buf_mem[0] = 4;
        }
      else
        {
          /* SWPB insn, swaps only byte.  */
          record_buf_mem[0] = 1;
        }
      record_buf_mem[1] = u_regval[0];
      arm_insn_r->mem_rec_count = 1;
      /* Rt (bits 12-15) receives the old memory contents.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (3 == arm_insn_r->decode && 0x12 == opcode1
           && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
    {
      /* Handle BLX, branch and link/exchange.  */
      if (9 == arm_insn_r->opcode)
        {
          /* Branch is chosen by setting T bit of CPSR, bit[0] of Rm,
             and R14 stores the return address.  */
          record_buf[0] = ARM_PS_REGNUM;
          record_buf[1] = ARM_LR_REGNUM;
          arm_insn_r->reg_rec_count = 2;
        }
    }
  else if (7 == arm_insn_r->decode && 0x12 == opcode1)
    {
      /* Handle enhanced software breakpoint insn, BKPT.  */
      /* CPSR is changed to be executed in ARM state, disabling normal
         interrupts, entering abort mode.  */
      /* According to high vector configuration PC is set.  */
      /* User hit breakpoint and type reverse, in
         that case, we need to go back with previous CPSR and
         Program Counter.  */
      record_buf[0] = ARM_PS_REGNUM;
      record_buf[1] = ARM_LR_REGNUM;
      arm_insn_r->reg_rec_count = 2;

      /* Save SPSR also; how?  Not recordable yet, so fail.  */
      return -1;
    }
  else if (11 == arm_insn_r->decode
           && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* Handle enhanced store insns and DSP insns (e.g. LDRD).  */

      /* Handle str(x) insn.  */
      arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
                      ARM_RECORD_STRH);
    }
  else if (1 == arm_insn_r->decode && 0x12 == opcode1
           && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
    {
      /* Handle BX, branch and link/exchange.  */
      /* Branch is chosen by setting T bit of CPSR, bit[0] of Rm.  */
      record_buf[0] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 1;
    }
  else if (1 == arm_insn_r->decode && 0x16 == opcode1
           && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
           && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
    {
      /* Count leading zeros: CLZ.  Only Rd (bits 12-15) changes.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
           && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
           && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
           && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
          )
    {
      /* Handle MRS insn: Rd (bits 12-15) receives CPSR/SPSR.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (arm_insn_r->opcode <= 15)
    {
      /* Normal data processing insns.  */
      /* Out of 11 shifter operands mode, all the insn modifies destination
         register, which is specified by 13-16 decode.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[1] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 2;
    }
  else
    {
      return -1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
10427
10428 /* Handling opcode 001 insns. */
10429
10430 static int
10431 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10432 {
10433 uint32_t record_buf[8], record_buf_mem[8];
10434
10435 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10436 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10437
10438 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10439 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10440 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10441 )
10442 {
10443 /* Handle MSR insn. */
10444 if (9 == arm_insn_r->opcode)
10445 {
10446 /* CSPR is going to be changed. */
10447 record_buf[0] = ARM_PS_REGNUM;
10448 arm_insn_r->reg_rec_count = 1;
10449 }
10450 else
10451 {
10452 /* SPSR is going to be changed. */
10453 }
10454 }
10455 else if (arm_insn_r->opcode <= 15)
10456 {
10457 /* Normal data processing insns. */
10458 /* Out of 11 shifter operands mode, all the insn modifies destination
10459 register, which is specified by 13-16 decode. */
10460 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10461 record_buf[1] = ARM_PS_REGNUM;
10462 arm_insn_r->reg_rec_count = 2;
10463 }
10464 else
10465 {
10466 return -1;
10467 }
10468
10469 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10470 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10471 return 0;
10472 }
10473
10474 static int
10475 arm_record_media (insn_decode_record *arm_insn_r)
10476 {
10477 uint32_t record_buf[8];
10478
10479 switch (bits (arm_insn_r->arm_insn, 22, 24))
10480 {
10481 case 0:
10482 /* Parallel addition and subtraction, signed */
10483 case 1:
10484 /* Parallel addition and subtraction, unsigned */
10485 case 2:
10486 case 3:
10487 /* Packing, unpacking, saturation and reversal */
10488 {
10489 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10490
10491 record_buf[arm_insn_r->reg_rec_count++] = rd;
10492 }
10493 break;
10494
10495 case 4:
10496 case 5:
10497 /* Signed multiplies */
10498 {
10499 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10500 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10501
10502 record_buf[arm_insn_r->reg_rec_count++] = rd;
10503 if (op1 == 0x0)
10504 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10505 else if (op1 == 0x4)
10506 record_buf[arm_insn_r->reg_rec_count++]
10507 = bits (arm_insn_r->arm_insn, 12, 15);
10508 }
10509 break;
10510
10511 case 6:
10512 {
10513 if (bit (arm_insn_r->arm_insn, 21)
10514 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10515 {
10516 /* SBFX */
10517 record_buf[arm_insn_r->reg_rec_count++]
10518 = bits (arm_insn_r->arm_insn, 12, 15);
10519 }
10520 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10521 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10522 {
10523 /* USAD8 and USADA8 */
10524 record_buf[arm_insn_r->reg_rec_count++]
10525 = bits (arm_insn_r->arm_insn, 16, 19);
10526 }
10527 }
10528 break;
10529
10530 case 7:
10531 {
10532 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10533 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10534 {
10535 /* Permanently UNDEFINED */
10536 return -1;
10537 }
10538 else
10539 {
10540 /* BFC, BFI and UBFX */
10541 record_buf[arm_insn_r->reg_rec_count++]
10542 = bits (arm_insn_r->arm_insn, 12, 15);
10543 }
10544 }
10545 break;
10546
10547 default:
10548 return -1;
10549 }
10550
10551 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10552
10553 return 0;
10554 }
10555
10556 /* Handle ARM mode instructions with opcode 010. */
10557
10558 static int
10559 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10560 {
10561 struct regcache *reg_cache = arm_insn_r->regcache;
10562
10563 uint32_t reg_base , reg_dest;
10564 uint32_t offset_12, tgt_mem_addr;
10565 uint32_t record_buf[8], record_buf_mem[8];
10566 unsigned char wback;
10567 ULONGEST u_regval;
10568
10569 /* Calculate wback. */
10570 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10571 || (bit (arm_insn_r->arm_insn, 21) == 1);
10572
10573 arm_insn_r->reg_rec_count = 0;
10574 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10575
10576 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10577 {
10578 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10579 and LDRT. */
10580
10581 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10582 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10583
10584 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10585 preceeds a LDR instruction having R15 as reg_base, it
10586 emulates a branch and link instruction, and hence we need to save
10587 CPSR and PC as well. */
10588 if (ARM_PC_REGNUM == reg_dest)
10589 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10590
10591 /* If wback is true, also save the base register, which is going to be
10592 written to. */
10593 if (wback)
10594 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10595 }
10596 else
10597 {
10598 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10599
10600 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10601 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10602
10603 /* Handle bit U. */
10604 if (bit (arm_insn_r->arm_insn, 23))
10605 {
10606 /* U == 1: Add the offset. */
10607 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10608 }
10609 else
10610 {
10611 /* U == 0: subtract the offset. */
10612 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10613 }
10614
10615 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10616 bytes. */
10617 if (bit (arm_insn_r->arm_insn, 22))
10618 {
10619 /* STRB and STRBT: 1 byte. */
10620 record_buf_mem[0] = 1;
10621 }
10622 else
10623 {
10624 /* STR and STRT: 4 bytes. */
10625 record_buf_mem[0] = 4;
10626 }
10627
10628 /* Handle bit P. */
10629 if (bit (arm_insn_r->arm_insn, 24))
10630 record_buf_mem[1] = tgt_mem_addr;
10631 else
10632 record_buf_mem[1] = (uint32_t) u_regval;
10633
10634 arm_insn_r->mem_rec_count = 1;
10635
10636 /* If wback is true, also save the base register, which is going to be
10637 written to. */
10638 if (wback)
10639 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10640 }
10641
10642 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10643 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10644 return 0;
10645 }
10646
10647 /* Handling opcode 011 insns. */
10648
10649 static int
10650 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10651 {
10652 struct regcache *reg_cache = arm_insn_r->regcache;
10653
10654 uint32_t shift_imm = 0;
10655 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10656 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10657 uint32_t record_buf[8], record_buf_mem[8];
10658
10659 LONGEST s_word;
10660 ULONGEST u_regval[2];
10661
10662 if (bit (arm_insn_r->arm_insn, 4))
10663 return arm_record_media (arm_insn_r);
10664
10665 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10666 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10667
10668 /* Handle enhanced store insns and LDRD DSP insn,
10669 order begins according to addressing modes for store insns
10670 STRH insn. */
10671
10672 /* LDR or STR? */
10673 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10674 {
10675 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10676 /* LDR insn has a capability to do branching, if
10677 MOV LR, PC is precedded by LDR insn having Rn as R15
10678 in that case, it emulates branch and link insn, and hence we
10679 need to save CSPR and PC as well. */
10680 if (15 != reg_dest)
10681 {
10682 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10683 arm_insn_r->reg_rec_count = 1;
10684 }
10685 else
10686 {
10687 record_buf[0] = reg_dest;
10688 record_buf[1] = ARM_PS_REGNUM;
10689 arm_insn_r->reg_rec_count = 2;
10690 }
10691 }
10692 else
10693 {
10694 if (! bits (arm_insn_r->arm_insn, 4, 11))
10695 {
10696 /* Store insn, register offset and register pre-indexed,
10697 register post-indexed. */
10698 /* Get Rm. */
10699 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10700 /* Get Rn. */
10701 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10702 regcache_raw_read_unsigned (reg_cache, reg_src1
10703 , &u_regval[0]);
10704 regcache_raw_read_unsigned (reg_cache, reg_src2
10705 , &u_regval[1]);
10706 if (15 == reg_src2)
10707 {
10708 /* If R15 was used as Rn, hence current PC+8. */
10709 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10710 u_regval[0] = u_regval[0] + 8;
10711 }
10712 /* Calculate target store address, Rn +/- Rm, register offset. */
10713 /* U == 1. */
10714 if (bit (arm_insn_r->arm_insn, 23))
10715 {
10716 tgt_mem_addr = u_regval[0] + u_regval[1];
10717 }
10718 else
10719 {
10720 tgt_mem_addr = u_regval[1] - u_regval[0];
10721 }
10722
10723 switch (arm_insn_r->opcode)
10724 {
10725 /* STR. */
10726 case 8:
10727 case 12:
10728 /* STR. */
10729 case 9:
10730 case 13:
10731 /* STRT. */
10732 case 1:
10733 case 5:
10734 /* STR. */
10735 case 0:
10736 case 4:
10737 record_buf_mem[0] = 4;
10738 break;
10739
10740 /* STRB. */
10741 case 10:
10742 case 14:
10743 /* STRB. */
10744 case 11:
10745 case 15:
10746 /* STRBT. */
10747 case 3:
10748 case 7:
10749 /* STRB. */
10750 case 2:
10751 case 6:
10752 record_buf_mem[0] = 1;
10753 break;
10754
10755 default:
10756 gdb_assert_not_reached ("no decoding pattern found");
10757 break;
10758 }
10759 record_buf_mem[1] = tgt_mem_addr;
10760 arm_insn_r->mem_rec_count = 1;
10761
10762 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10763 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10764 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10765 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10766 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10767 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10768 )
10769 {
10770 /* Rn is going to be changed in pre-indexed mode and
10771 post-indexed mode as well. */
10772 record_buf[0] = reg_src2;
10773 arm_insn_r->reg_rec_count = 1;
10774 }
10775 }
10776 else
10777 {
10778 /* Store insn, scaled register offset; scaled pre-indexed. */
10779 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10780 /* Get Rm. */
10781 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10782 /* Get Rn. */
10783 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10784 /* Get shift_imm. */
10785 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10786 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10787 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10788 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10789 /* Offset_12 used as shift. */
10790 switch (offset_12)
10791 {
10792 case 0:
10793 /* Offset_12 used as index. */
10794 offset_12 = u_regval[0] << shift_imm;
10795 break;
10796
10797 case 1:
10798 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10799 break;
10800
10801 case 2:
10802 if (!shift_imm)
10803 {
10804 if (bit (u_regval[0], 31))
10805 {
10806 offset_12 = 0xFFFFFFFF;
10807 }
10808 else
10809 {
10810 offset_12 = 0;
10811 }
10812 }
10813 else
10814 {
10815 /* This is arithmetic shift. */
10816 offset_12 = s_word >> shift_imm;
10817 }
10818 break;
10819
10820 case 3:
10821 if (!shift_imm)
10822 {
10823 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10824 &u_regval[1]);
10825 /* Get C flag value and shift it by 31. */
10826 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10827 | (u_regval[0]) >> 1);
10828 }
10829 else
10830 {
10831 offset_12 = (u_regval[0] >> shift_imm) \
10832 | (u_regval[0] <<
10833 (sizeof(uint32_t) - shift_imm));
10834 }
10835 break;
10836
10837 default:
10838 gdb_assert_not_reached ("no decoding pattern found");
10839 break;
10840 }
10841
10842 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10843 /* bit U set. */
10844 if (bit (arm_insn_r->arm_insn, 23))
10845 {
10846 tgt_mem_addr = u_regval[1] + offset_12;
10847 }
10848 else
10849 {
10850 tgt_mem_addr = u_regval[1] - offset_12;
10851 }
10852
10853 switch (arm_insn_r->opcode)
10854 {
10855 /* STR. */
10856 case 8:
10857 case 12:
10858 /* STR. */
10859 case 9:
10860 case 13:
10861 /* STRT. */
10862 case 1:
10863 case 5:
10864 /* STR. */
10865 case 0:
10866 case 4:
10867 record_buf_mem[0] = 4;
10868 break;
10869
10870 /* STRB. */
10871 case 10:
10872 case 14:
10873 /* STRB. */
10874 case 11:
10875 case 15:
10876 /* STRBT. */
10877 case 3:
10878 case 7:
10879 /* STRB. */
10880 case 2:
10881 case 6:
10882 record_buf_mem[0] = 1;
10883 break;
10884
10885 default:
10886 gdb_assert_not_reached ("no decoding pattern found");
10887 break;
10888 }
10889 record_buf_mem[1] = tgt_mem_addr;
10890 arm_insn_r->mem_rec_count = 1;
10891
10892 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10893 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10894 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10895 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10896 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10897 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10898 )
10899 {
10900 /* Rn is going to be changed in register scaled pre-indexed
10901 mode,and scaled post indexed mode. */
10902 record_buf[0] = reg_src2;
10903 arm_insn_r->reg_rec_count = 1;
10904 }
10905 }
10906 }
10907
10908 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10909 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10910 return 0;
10911 }
10912
10913 /* Handle ARM mode instructions with opcode 100. */
10914
10915 static int
10916 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10917 {
10918 struct regcache *reg_cache = arm_insn_r->regcache;
10919 uint32_t register_count = 0, register_bits;
10920 uint32_t reg_base, addr_mode;
10921 uint32_t record_buf[24], record_buf_mem[48];
10922 uint32_t wback;
10923 ULONGEST u_regval;
10924
10925 /* Fetch the list of registers. */
10926 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10927 arm_insn_r->reg_rec_count = 0;
10928
10929 /* Fetch the base register that contains the address we are loading data
10930 to. */
10931 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10932
10933 /* Calculate wback. */
10934 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10935
10936 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10937 {
10938 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10939
10940 /* Find out which registers are going to be loaded from memory. */
10941 while (register_bits)
10942 {
10943 if (register_bits & 0x00000001)
10944 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10945 register_bits = register_bits >> 1;
10946 register_count++;
10947 }
10948
10949
10950 /* If wback is true, also save the base register, which is going to be
10951 written to. */
10952 if (wback)
10953 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10954
10955 /* Save the CPSR register. */
10956 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10957 }
10958 else
10959 {
10960 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10961
10962 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10963
10964 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10965
10966 /* Find out how many registers are going to be stored to memory. */
10967 while (register_bits)
10968 {
10969 if (register_bits & 0x00000001)
10970 register_count++;
10971 register_bits = register_bits >> 1;
10972 }
10973
10974 switch (addr_mode)
10975 {
10976 /* STMDA (STMED): Decrement after. */
10977 case 0:
10978 record_buf_mem[1] = (uint32_t) u_regval
10979 - register_count * INT_REGISTER_SIZE + 4;
10980 break;
10981 /* STM (STMIA, STMEA): Increment after. */
10982 case 1:
10983 record_buf_mem[1] = (uint32_t) u_regval;
10984 break;
10985 /* STMDB (STMFD): Decrement before. */
10986 case 2:
10987 record_buf_mem[1] = (uint32_t) u_regval
10988 - register_count * INT_REGISTER_SIZE;
10989 break;
10990 /* STMIB (STMFA): Increment before. */
10991 case 3:
10992 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10993 break;
10994 default:
10995 gdb_assert_not_reached ("no decoding pattern found");
10996 break;
10997 }
10998
10999 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11000 arm_insn_r->mem_rec_count = 1;
11001
11002 /* If wback is true, also save the base register, which is going to be
11003 written to. */
11004 if (wback)
11005 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11006 }
11007
11008 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11009 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11010 return 0;
11011 }
11012
11013 /* Handling opcode 101 insns. */
11014
11015 static int
11016 arm_record_b_bl (insn_decode_record *arm_insn_r)
11017 {
11018 uint32_t record_buf[8];
11019
11020 /* Handle B, BL, BLX(1) insns. */
11021 /* B simply branches so we do nothing here. */
11022 /* Note: BLX(1) doesnt fall here but instead it falls into
11023 extension space. */
11024 if (bit (arm_insn_r->arm_insn, 24))
11025 {
11026 record_buf[0] = ARM_LR_REGNUM;
11027 arm_insn_r->reg_rec_count = 1;
11028 }
11029
11030 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11031
11032 return 0;
11033 }
11034
11035 static int
11036 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11037 {
11038 printf_unfiltered (_("Process record does not support instruction "
11039 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11040 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11041
11042 return -1;
11043 }
11044
11045 /* Record handler for vector data transfer instructions. */
11046
11047 static int
11048 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11049 {
11050 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11051 uint32_t record_buf[4];
11052
11053 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11054 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11055 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11056 bit_l = bit (arm_insn_r->arm_insn, 20);
11057 bit_c = bit (arm_insn_r->arm_insn, 8);
11058
11059 /* Handle VMOV instruction. */
11060 if (bit_l && bit_c)
11061 {
11062 record_buf[0] = reg_t;
11063 arm_insn_r->reg_rec_count = 1;
11064 }
11065 else if (bit_l && !bit_c)
11066 {
11067 /* Handle VMOV instruction. */
11068 if (bits_a == 0x00)
11069 {
11070 record_buf[0] = reg_t;
11071 arm_insn_r->reg_rec_count = 1;
11072 }
11073 /* Handle VMRS instruction. */
11074 else if (bits_a == 0x07)
11075 {
11076 if (reg_t == 15)
11077 reg_t = ARM_PS_REGNUM;
11078
11079 record_buf[0] = reg_t;
11080 arm_insn_r->reg_rec_count = 1;
11081 }
11082 }
11083 else if (!bit_l && !bit_c)
11084 {
11085 /* Handle VMOV instruction. */
11086 if (bits_a == 0x00)
11087 {
11088 record_buf[0] = ARM_D0_REGNUM + reg_v;
11089
11090 arm_insn_r->reg_rec_count = 1;
11091 }
11092 /* Handle VMSR instruction. */
11093 else if (bits_a == 0x07)
11094 {
11095 record_buf[0] = ARM_FPSCR_REGNUM;
11096 arm_insn_r->reg_rec_count = 1;
11097 }
11098 }
11099 else if (!bit_l && bit_c)
11100 {
11101 /* Handle VMOV instruction. */
11102 if (!(bits_a & 0x04))
11103 {
11104 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11105 + ARM_D0_REGNUM;
11106 arm_insn_r->reg_rec_count = 1;
11107 }
11108 /* Handle VDUP instruction. */
11109 else
11110 {
11111 if (bit (arm_insn_r->arm_insn, 21))
11112 {
11113 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11114 record_buf[0] = reg_v + ARM_D0_REGNUM;
11115 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11116 arm_insn_r->reg_rec_count = 2;
11117 }
11118 else
11119 {
11120 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11121 record_buf[0] = reg_v + ARM_D0_REGNUM;
11122 arm_insn_r->reg_rec_count = 1;
11123 }
11124 }
11125 }
11126
11127 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11128 return 0;
11129 }
11130
/* Record handler for extension register load/store instructions.

   Decodes the VFP/NEON extension-register forms reached from
   arm_record_asimd_vfp_coproc: VMOV between core and extension
   registers, VSTM/VPUSH, VLDM, VSTR and VLDR.  Fills in the lists of
   registers and memory ranges that the instruction will modify so the
   record/replay machinery can save their pre-execution values.
   Always returns 0.  */

static int
arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
{
  uint32_t opcode, single_reg;
  uint8_t op_vldm_vstm;
  uint32_t record_buf[8], record_buf_mem[128];
  ULONGEST u_regval = 0;

  struct regcache *reg_cache = arm_insn_r->regcache;

  /* Bits 20-24 select the particular form; bit 8 distinguishes
     double-precision (set) from single-precision (clear) accesses.  */
  opcode = bits (arm_insn_r->arm_insn, 20, 24);
  single_reg = !bit (arm_insn_r->arm_insn, 8);
  op_vldm_vstm = opcode & 0x1b;

  /* Handle VMOV instructions.  */
  if ((opcode & 0x1e) == 0x04)
    {
      if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
	{
	  /* Transfer into ARM core registers: Rt (bits 12-15) and
	     Rt2 (bits 16-19) are overwritten.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
	  arm_insn_r->reg_rec_count = 2;
	}
      else
	{
	  uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
	  uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);

	  if (single_reg)
	    {
	      /* The first S register number m is REG_M:M (M is bit 5),
		 the corresponding D register number is REG_M:M / 2, which
		 is REG_M.  */
	      record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
	      /* The second S register number is REG_M:M + 1, the
		 corresponding D register number is (REG_M:M + 1) / 2.
		 IOW, if bit M is 1, the first and second S registers
		 are mapped to different D registers, otherwise, they are
		 in the same D register.  */
	      if (bit_m)
		{
		  record_buf[arm_insn_r->reg_rec_count++]
		    = ARM_D0_REGNUM + reg_m + 1;
		}
	    }
	  else
	    {
	      /* Double-precision destination: D register number is M:Vm.  */
	      record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
	      arm_insn_r->reg_rec_count = 1;
	    }
	}
    }
  /* Handle VSTM and VPUSH instructions.  */
  else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
	   || op_vldm_vstm == 0x12)
    {
      uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
      uint32_t memory_index = 0;

      /* Base register Rn and the 8-bit immediate word count.  */
      reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
      imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
      imm_off32 = imm_off8 << 2;
      memory_count = imm_off8;

      /* Bit 23 (U): increment form starts storing at Rn; otherwise the
	 stores begin below the base (decrement-before, e.g. VPUSH).  */
      if (bit (arm_insn_r->arm_insn, 23))
	start_address = u_regval;
      else
	start_address = u_regval - imm_off32;

      /* Bit 21 (W): write-back also modifies the base register.  */
      if (bit (arm_insn_r->arm_insn, 21))
	{
	  record_buf[0] = reg_rn;
	  arm_insn_r->reg_rec_count = 1;
	}

      /* NOTE(review): MEMORY_COUNT is the raw imm8 word count, yet the
	 double-precision arm of this loop records 8 bytes per iteration,
	 which looks like it covers twice the range actually stored —
	 confirm against the ARM ARM definition of VSTM.  */
      while (memory_count > 0)
	{
	  if (single_reg)
	    {
	      record_buf_mem[memory_index] = 4;
	      record_buf_mem[memory_index + 1] = start_address;
	      start_address = start_address + 4;
	      memory_index = memory_index + 2;
	    }
	  else
	    {
	      record_buf_mem[memory_index] = 4;
	      record_buf_mem[memory_index + 1] = start_address;
	      record_buf_mem[memory_index + 2] = 4;
	      record_buf_mem[memory_index + 3] = start_address + 4;
	      start_address = start_address + 8;
	      memory_index = memory_index + 4;
	    }
	  memory_count--;
	}
      /* Each recorded range occupies two slots: (length, address).  */
      arm_insn_r->mem_rec_count = (memory_index >> 1);
    }
  /* Handle VLDM instructions.  */
  else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
	   || op_vldm_vstm == 0x13)
    {
      uint32_t reg_count, reg_vd;
      uint32_t reg_index = 0;
      uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);

      reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
      reg_count = bits (arm_insn_r->arm_insn, 0, 7);

      /* REG_VD is the first D register number.  If the instruction
	 loads memory to S registers (SINGLE_REG is TRUE), the register
	 number is (REG_VD << 1 | bit D), so the corresponding D
	 register number is (REG_VD << 1 | bit D) / 2 = REG_VD.  */
      if (!single_reg)
	reg_vd = reg_vd | (bit_d << 4);

      if (bit (arm_insn_r->arm_insn, 21) /* write back */)
	record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);

      /* If the instruction loads memory to D register, REG_COUNT should
	 be divided by 2, according to the ARM Architecture Reference
	 Manual.  If the instruction loads memory to S register, divide by
	 2 as well because two S registers are mapped to D register.  */
      reg_count = reg_count / 2;
      if (single_reg && bit_d)
	{
	  /* Increase the register count if S register list starts from
	     an odd number (bit d is one).  */
	  reg_count++;
	}

      /* Record the destination D registers, highest-numbered first.  */
      while (reg_count > 0)
	{
	  record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
	  reg_count--;
	}
      arm_insn_r->reg_rec_count = reg_index;
    }
  /* VSTR Vector store register.  */
  else if ((opcode & 0x13) == 0x10)
    {
      uint32_t start_address, reg_rn, imm_off32, imm_off8;
      uint32_t memory_index = 0;

      reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
      imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
      imm_off32 = imm_off8 << 2;

      /* Bit 23 (U) selects whether the offset is added or subtracted.  */
      if (bit (arm_insn_r->arm_insn, 23))
	start_address = u_regval + imm_off32;
      else
	start_address = u_regval - imm_off32;

      if (single_reg)
	{
	  /* A single 4-byte word is overwritten.  */
	  record_buf_mem[memory_index] = 4;
	  record_buf_mem[memory_index + 1] = start_address;
	  arm_insn_r->mem_rec_count = 1;
	}
      else
	{
	  /* A D register is stored as two consecutive words.  */
	  record_buf_mem[memory_index] = 4;
	  record_buf_mem[memory_index + 1] = start_address;
	  record_buf_mem[memory_index + 2] = 4;
	  record_buf_mem[memory_index + 3] = start_address + 4;
	  arm_insn_r->mem_rec_count = 2;
	}
    }
  /* VLDR Vector load register.  */
  else if ((opcode & 0x13) == 0x11)
    {
      uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);

      if (!single_reg)
	{
	  /* Destination D register number is D:Vd.  */
	  reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
	  record_buf[0] = ARM_D0_REGNUM + reg_vd;
	}
      else
	{
	  reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
	  /* Record register D rather than pseudo register S.  */
	  record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
	}
      arm_insn_r->reg_rec_count = 1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
11325
11326 /* Record handler for arm/thumb mode VFP data processing instructions. */
11327
11328 static int
11329 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11330 {
11331 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11332 uint32_t record_buf[4];
11333 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11334 enum insn_types curr_insn_type = INSN_INV;
11335
11336 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11337 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11338 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11339 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11340 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11341 bit_d = bit (arm_insn_r->arm_insn, 22);
11342 opc1 = opc1 & 0x04;
11343
11344 /* Handle VMLA, VMLS. */
11345 if (opc1 == 0x00)
11346 {
11347 if (bit (arm_insn_r->arm_insn, 10))
11348 {
11349 if (bit (arm_insn_r->arm_insn, 6))
11350 curr_insn_type = INSN_T0;
11351 else
11352 curr_insn_type = INSN_T1;
11353 }
11354 else
11355 {
11356 if (dp_op_sz)
11357 curr_insn_type = INSN_T1;
11358 else
11359 curr_insn_type = INSN_T2;
11360 }
11361 }
11362 /* Handle VNMLA, VNMLS, VNMUL. */
11363 else if (opc1 == 0x01)
11364 {
11365 if (dp_op_sz)
11366 curr_insn_type = INSN_T1;
11367 else
11368 curr_insn_type = INSN_T2;
11369 }
11370 /* Handle VMUL. */
11371 else if (opc1 == 0x02 && !(opc3 & 0x01))
11372 {
11373 if (bit (arm_insn_r->arm_insn, 10))
11374 {
11375 if (bit (arm_insn_r->arm_insn, 6))
11376 curr_insn_type = INSN_T0;
11377 else
11378 curr_insn_type = INSN_T1;
11379 }
11380 else
11381 {
11382 if (dp_op_sz)
11383 curr_insn_type = INSN_T1;
11384 else
11385 curr_insn_type = INSN_T2;
11386 }
11387 }
11388 /* Handle VADD, VSUB. */
11389 else if (opc1 == 0x03)
11390 {
11391 if (!bit (arm_insn_r->arm_insn, 9))
11392 {
11393 if (bit (arm_insn_r->arm_insn, 6))
11394 curr_insn_type = INSN_T0;
11395 else
11396 curr_insn_type = INSN_T1;
11397 }
11398 else
11399 {
11400 if (dp_op_sz)
11401 curr_insn_type = INSN_T1;
11402 else
11403 curr_insn_type = INSN_T2;
11404 }
11405 }
11406 /* Handle VDIV. */
11407 else if (opc1 == 0x0b)
11408 {
11409 if (dp_op_sz)
11410 curr_insn_type = INSN_T1;
11411 else
11412 curr_insn_type = INSN_T2;
11413 }
11414 /* Handle all other vfp data processing instructions. */
11415 else if (opc1 == 0x0b)
11416 {
11417 /* Handle VMOV. */
11418 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11419 {
11420 if (bit (arm_insn_r->arm_insn, 4))
11421 {
11422 if (bit (arm_insn_r->arm_insn, 6))
11423 curr_insn_type = INSN_T0;
11424 else
11425 curr_insn_type = INSN_T1;
11426 }
11427 else
11428 {
11429 if (dp_op_sz)
11430 curr_insn_type = INSN_T1;
11431 else
11432 curr_insn_type = INSN_T2;
11433 }
11434 }
11435 /* Handle VNEG and VABS. */
11436 else if ((opc2 == 0x01 && opc3 == 0x01)
11437 || (opc2 == 0x00 && opc3 == 0x03))
11438 {
11439 if (!bit (arm_insn_r->arm_insn, 11))
11440 {
11441 if (bit (arm_insn_r->arm_insn, 6))
11442 curr_insn_type = INSN_T0;
11443 else
11444 curr_insn_type = INSN_T1;
11445 }
11446 else
11447 {
11448 if (dp_op_sz)
11449 curr_insn_type = INSN_T1;
11450 else
11451 curr_insn_type = INSN_T2;
11452 }
11453 }
11454 /* Handle VSQRT. */
11455 else if (opc2 == 0x01 && opc3 == 0x03)
11456 {
11457 if (dp_op_sz)
11458 curr_insn_type = INSN_T1;
11459 else
11460 curr_insn_type = INSN_T2;
11461 }
11462 /* Handle VCVT. */
11463 else if (opc2 == 0x07 && opc3 == 0x03)
11464 {
11465 if (!dp_op_sz)
11466 curr_insn_type = INSN_T1;
11467 else
11468 curr_insn_type = INSN_T2;
11469 }
11470 else if (opc3 & 0x01)
11471 {
11472 /* Handle VCVT. */
11473 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11474 {
11475 if (!bit (arm_insn_r->arm_insn, 18))
11476 curr_insn_type = INSN_T2;
11477 else
11478 {
11479 if (dp_op_sz)
11480 curr_insn_type = INSN_T1;
11481 else
11482 curr_insn_type = INSN_T2;
11483 }
11484 }
11485 /* Handle VCVT. */
11486 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11487 {
11488 if (dp_op_sz)
11489 curr_insn_type = INSN_T1;
11490 else
11491 curr_insn_type = INSN_T2;
11492 }
11493 /* Handle VCVTB, VCVTT. */
11494 else if ((opc2 & 0x0e) == 0x02)
11495 curr_insn_type = INSN_T2;
11496 /* Handle VCMP, VCMPE. */
11497 else if ((opc2 & 0x0e) == 0x04)
11498 curr_insn_type = INSN_T3;
11499 }
11500 }
11501
11502 switch (curr_insn_type)
11503 {
11504 case INSN_T0:
11505 reg_vd = reg_vd | (bit_d << 4);
11506 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11507 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11508 arm_insn_r->reg_rec_count = 2;
11509 break;
11510
11511 case INSN_T1:
11512 reg_vd = reg_vd | (bit_d << 4);
11513 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11514 arm_insn_r->reg_rec_count = 1;
11515 break;
11516
11517 case INSN_T2:
11518 reg_vd = (reg_vd << 1) | bit_d;
11519 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11520 arm_insn_r->reg_rec_count = 1;
11521 break;
11522
11523 case INSN_T3:
11524 record_buf[0] = ARM_FPSCR_REGNUM;
11525 arm_insn_r->reg_rec_count = 1;
11526 break;
11527
11528 default:
11529 gdb_assert_not_reached ("no decoding pattern found");
11530 break;
11531 }
11532
11533 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11534 return 0;
11535 }
11536
11537 /* Handling opcode 110 insns. */
11538
11539 static int
11540 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11541 {
11542 uint32_t op1, op1_ebit, coproc;
11543
11544 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11545 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11546 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11547
11548 if ((coproc & 0x0e) == 0x0a)
11549 {
11550 /* Handle extension register ld/st instructions. */
11551 if (!(op1 & 0x20))
11552 return arm_record_exreg_ld_st_insn (arm_insn_r);
11553
11554 /* 64-bit transfers between arm core and extension registers. */
11555 if ((op1 & 0x3e) == 0x04)
11556 return arm_record_exreg_ld_st_insn (arm_insn_r);
11557 }
11558 else
11559 {
11560 /* Handle coprocessor ld/st instructions. */
11561 if (!(op1 & 0x3a))
11562 {
11563 /* Store. */
11564 if (!op1_ebit)
11565 return arm_record_unsupported_insn (arm_insn_r);
11566 else
11567 /* Load. */
11568 return arm_record_unsupported_insn (arm_insn_r);
11569 }
11570
11571 /* Move to coprocessor from two arm core registers. */
11572 if (op1 == 0x4)
11573 return arm_record_unsupported_insn (arm_insn_r);
11574
11575 /* Move to two arm core registers from coprocessor. */
11576 if (op1 == 0x5)
11577 {
11578 uint32_t reg_t[2];
11579
11580 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11581 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11582 arm_insn_r->reg_rec_count = 2;
11583
11584 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11585 return 0;
11586 }
11587 }
11588 return arm_record_unsupported_insn (arm_insn_r);
11589 }
11590
/* Handling opcode 111 insns.

   Covers SWI/SVC system calls, VFP data processing, and generic
   coprocessor data operations and register transfers.  Returns the
   result of the specific record handler, or -1 when syscall recording
   is not available.  */

static int
arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
{
  uint32_t op, op1_sbit, op1_ebit, coproc;
  struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
  struct regcache *reg_cache = arm_insn_r->regcache;

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
  coproc = bits (arm_insn_r->arm_insn, 8, 11);
  op1_sbit = bit (arm_insn_r->arm_insn, 24);
  op1_ebit = bit (arm_insn_r->arm_insn, 20);
  op = bit (arm_insn_r->arm_insn, 4);

  /* Handle arm SWI/SVC system call instructions.  */
  if (op1_sbit)
    {
      if (tdep->arm_syscall_record != NULL)
	{
	  ULONGEST svc_operand, svc_number;

	  /* The 24-bit immediate of the SVC instruction.  */
	  svc_operand = (0x00ffffff & arm_insn_r->arm_insn);

	  if (svc_operand)  /* OABI.  */
	    svc_number = svc_operand - 0x900000;
	  else /* EABI.  */
	    /* EABI encodes the syscall number in r7, not the insn.  */
	    regcache_raw_read_unsigned (reg_cache, 7, &svc_number);

	  return tdep->arm_syscall_record (reg_cache, svc_number);
	}
      else
	{
	  printf_unfiltered (_("no syscall record support\n"));
	  return -1;
	}
    }

  if ((coproc & 0x0e) == 0x0a)
    {
      /* VFP data-processing instructions.  */
      if (!op1_sbit && !op)
	return arm_record_vfp_data_proc_insn (arm_insn_r);

      /* Advanced SIMD, VFP instructions.  */
      if (!op1_sbit && op)
	return arm_record_vdata_transfer_insn (arm_insn_r);
    }
  else
    {
      /* Coprocessor data operations.  */
      if (!op1_sbit && !op)
	return arm_record_unsupported_insn (arm_insn_r);

      /* Move to Coprocessor from ARM core register.  */
      if (!op1_sbit && !op1_ebit && op)
	return arm_record_unsupported_insn (arm_insn_r);

      /* Move to arm core register from coprocessor.  */
      if (!op1_sbit && op1_ebit && op)
	{
	  uint32_t record_buf[1];

	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  /* Rt == 15 means the flags, not the PC, are written.  */
	  if (record_buf[0] == 15)
	    record_buf[0] = ARM_PS_REGNUM;

	  arm_insn_r->reg_rec_count = 1;
	  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
		     record_buf);
	  return 0;
	}
    }

  return arm_record_unsupported_insn (arm_insn_r);
}
11667
11668 /* Handling opcode 000 insns. */
11669
11670 static int
11671 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11672 {
11673 uint32_t record_buf[8];
11674 uint32_t reg_src1 = 0;
11675
11676 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11677
11678 record_buf[0] = ARM_PS_REGNUM;
11679 record_buf[1] = reg_src1;
11680 thumb_insn_r->reg_rec_count = 2;
11681
11682 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11683
11684 return 0;
11685 }
11686
11687
11688 /* Handling opcode 001 insns. */
11689
11690 static int
11691 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11692 {
11693 uint32_t record_buf[8];
11694 uint32_t reg_src1 = 0;
11695
11696 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11697
11698 record_buf[0] = ARM_PS_REGNUM;
11699 record_buf[1] = reg_src1;
11700 thumb_insn_r->reg_rec_count = 2;
11701
11702 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11703
11704 return 0;
11705 }
11706
11707 /* Handling opcode 010 insns. */
11708
11709 static int
11710 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11711 {
11712 struct regcache *reg_cache = thumb_insn_r->regcache;
11713 uint32_t record_buf[8], record_buf_mem[8];
11714
11715 uint32_t reg_src1 = 0, reg_src2 = 0;
11716 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11717
11718 ULONGEST u_regval[2] = {0};
11719
11720 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11721
11722 if (bit (thumb_insn_r->arm_insn, 12))
11723 {
11724 /* Handle load/store register offset. */
11725 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
11726 if (opcode2 >= 12 && opcode2 <= 15)
11727 {
11728 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11729 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11730 record_buf[0] = reg_src1;
11731 thumb_insn_r->reg_rec_count = 1;
11732 }
11733 else if (opcode2 >= 8 && opcode2 <= 10)
11734 {
11735 /* STR(2), STRB(2), STRH(2) . */
11736 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11737 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11738 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11739 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11740 if (8 == opcode2)
11741 record_buf_mem[0] = 4; /* STR (2). */
11742 else if (10 == opcode2)
11743 record_buf_mem[0] = 1; /* STRB (2). */
11744 else if (9 == opcode2)
11745 record_buf_mem[0] = 2; /* STRH (2). */
11746 record_buf_mem[1] = u_regval[0] + u_regval[1];
11747 thumb_insn_r->mem_rec_count = 1;
11748 }
11749 }
11750 else if (bit (thumb_insn_r->arm_insn, 11))
11751 {
11752 /* Handle load from literal pool. */
11753 /* LDR(3). */
11754 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11755 record_buf[0] = reg_src1;
11756 thumb_insn_r->reg_rec_count = 1;
11757 }
11758 else if (opcode1)
11759 {
11760 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11761 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11762 if ((3 == opcode2) && (!opcode3))
11763 {
11764 /* Branch with exchange. */
11765 record_buf[0] = ARM_PS_REGNUM;
11766 thumb_insn_r->reg_rec_count = 1;
11767 }
11768 else
11769 {
11770 /* Format 8; special data processing insns. */
11771 record_buf[0] = ARM_PS_REGNUM;
11772 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11773 | bits (thumb_insn_r->arm_insn, 0, 2));
11774 thumb_insn_r->reg_rec_count = 2;
11775 }
11776 }
11777 else
11778 {
11779 /* Format 5; data processing insns. */
11780 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11781 if (bit (thumb_insn_r->arm_insn, 7))
11782 {
11783 reg_src1 = reg_src1 + 8;
11784 }
11785 record_buf[0] = ARM_PS_REGNUM;
11786 record_buf[1] = reg_src1;
11787 thumb_insn_r->reg_rec_count = 2;
11788 }
11789
11790 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11791 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11792 record_buf_mem);
11793
11794 return 0;
11795 }
11796
11797 /* Handling opcode 001 insns. */
11798
11799 static int
11800 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11801 {
11802 struct regcache *reg_cache = thumb_insn_r->regcache;
11803 uint32_t record_buf[8], record_buf_mem[8];
11804
11805 uint32_t reg_src1 = 0;
11806 uint32_t opcode = 0, immed_5 = 0;
11807
11808 ULONGEST u_regval = 0;
11809
11810 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11811
11812 if (opcode)
11813 {
11814 /* LDR(1). */
11815 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11816 record_buf[0] = reg_src1;
11817 thumb_insn_r->reg_rec_count = 1;
11818 }
11819 else
11820 {
11821 /* STR(1). */
11822 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11823 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11824 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11825 record_buf_mem[0] = 4;
11826 record_buf_mem[1] = u_regval + (immed_5 * 4);
11827 thumb_insn_r->mem_rec_count = 1;
11828 }
11829
11830 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11831 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11832 record_buf_mem);
11833
11834 return 0;
11835 }
11836
11837 /* Handling opcode 100 insns. */
11838
11839 static int
11840 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11841 {
11842 struct regcache *reg_cache = thumb_insn_r->regcache;
11843 uint32_t record_buf[8], record_buf_mem[8];
11844
11845 uint32_t reg_src1 = 0;
11846 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11847
11848 ULONGEST u_regval = 0;
11849
11850 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11851
11852 if (3 == opcode)
11853 {
11854 /* LDR(4). */
11855 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11856 record_buf[0] = reg_src1;
11857 thumb_insn_r->reg_rec_count = 1;
11858 }
11859 else if (1 == opcode)
11860 {
11861 /* LDRH(1). */
11862 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11863 record_buf[0] = reg_src1;
11864 thumb_insn_r->reg_rec_count = 1;
11865 }
11866 else if (2 == opcode)
11867 {
11868 /* STR(3). */
11869 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11870 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11871 record_buf_mem[0] = 4;
11872 record_buf_mem[1] = u_regval + (immed_8 * 4);
11873 thumb_insn_r->mem_rec_count = 1;
11874 }
11875 else if (0 == opcode)
11876 {
11877 /* STRH(1). */
11878 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11879 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11880 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11881 record_buf_mem[0] = 2;
11882 record_buf_mem[1] = u_regval + (immed_5 * 2);
11883 thumb_insn_r->mem_rec_count = 1;
11884 }
11885
11886 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11887 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11888 record_buf_mem);
11889
11890 return 0;
11891 }
11892
/* Handling opcode 101 insns.

   Miscellaneous 16-bit Thumb instructions: POP, PUSH, BKPT and the
   SP/PC-relative adds.  Records the registers and the stack memory
   modified.  Returns 0 on success, -1 for BKPT (SPSR saving is not
   implemented).  */

static int
thumb_record_misc (insn_decode_record *thumb_insn_r)
{
  struct regcache *reg_cache = thumb_insn_r->regcache;

  uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];
  uint32_t reg_src1;

  ULONGEST u_regval = 0;

  /* Successively wider opcode fields used by the tests below.  */
  opcode = bits (thumb_insn_r->arm_insn, 11, 12);
  opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
  opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);

  if (14 == opcode2)
    {
      /* POP.  Every register in the 8-bit list is overwritten, plus
	 the flags and SP (write-back).  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    record_buf[index++] = register_count;
	  register_bits = register_bits >> 1;
	  register_count++;
	}
      record_buf[index++] = ARM_PS_REGNUM;
      record_buf[index++] = ARM_SP_REGNUM;
      thumb_insn_r->reg_rec_count = index;
    }
  else if (10 == opcode2)
    {
      /* PUSH.  Count the registers in the list to find the lowest
	 address written; bit 8 (R) adds LR to the stored set.  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    register_count++;
	  register_bits = register_bits >> 1;
	}
      start_address = u_regval -  \
	(4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
      thumb_insn_r->mem_rec_count = register_count;
      /* Record each stored word as a (length, address) pair, filled
	 from the end of the buffer backwards.  */
      while (register_count)
	{
	  record_buf_mem[(register_count * 2) - 1] = start_address;
	  record_buf_mem[(register_count * 2) - 2] = 4;
	  start_address = start_address + 4;
	  register_count--;
	}
      record_buf[0] = ARM_SP_REGNUM;
      thumb_insn_r->reg_rec_count = 1;
    }
  else if (0x1E == opcode1)
    {
      /* BKPT insn.  */
      /* Handle enhanced software breakpoint insn, BKPT.  */
      /* CPSR is changed to be executed in ARM state,  disabling normal
	 interrupts, entering abort mode.  */
      /* According to high vector configuration PC is set.  */
      /* User hits breakpoint and type reverse, in that case, we need to go back with
	 previous CPSR and Program Counter.  */
      record_buf[0] = ARM_PS_REGNUM;
      record_buf[1] = ARM_LR_REGNUM;
      thumb_insn_r->reg_rec_count = 2;
      /* We need to save SPSR value, which is not yet done.  */
      printf_unfiltered (_("Process record does not support instruction "
			   "0x%0x at address %s.\n"),
			 thumb_insn_r->arm_insn,
			 paddress (thumb_insn_r->gdbarch,
				   thumb_insn_r->this_addr));
      return -1;
    }
  else if ((0 == opcode) || (1 == opcode))
    {
      /* ADD(5), ADD(6): only Rd (bits 8-10) is written.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      record_buf[0] = reg_src1;
      thumb_insn_r->reg_rec_count = 1;
    }
  else if (2 == opcode)
    {
      /* ADD(7), SUB(4): SP is the destination.  NOTE(review):
	 REG_SRC1 is computed here but never used.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      record_buf[0] = ARM_SP_REGNUM;
      thumb_insn_r->reg_rec_count = 1;
    }

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return 0;
}
11992
/* Handling opcode 110 insns.

   LDMIA, STMIA and the Thumb SWI/SVC system call.  Conditional
   branches in this opcode space need no handling here because the PC
   is saved by the caller.  Returns 0 on success, or the syscall
   handler's result, or -1 when syscall recording is unavailable.  */

static int
thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
  struct regcache *reg_cache = thumb_insn_r->regcache;

  uint32_t ret = 0; /* function return value: -1:record failure ;  0:success  */
  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
  opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);

  if (1 == opcode2)
    {

      /* LDMIA.  Every register in the 8-bit list is overwritten, plus
	 the base register (write-back).  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    record_buf[index++] = register_count;
	  register_bits = register_bits >> 1;
	  register_count++;
	}
      record_buf[index++] = reg_src1;
      thumb_insn_r->reg_rec_count = index;
    }
  else if (0 == opcode2)
    {
      /* It handles both STMIA.  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
      /* Count the listed registers; each one stores a word starting
	 at the base register's current value.  */
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    register_count++;
	  register_bits = register_bits >> 1;
	}
      start_address = u_regval;
      thumb_insn_r->mem_rec_count = register_count;
      /* Record each stored word as a (length, address) pair, filled
	 from the end of the buffer backwards.  */
      while (register_count)
	{
	  record_buf_mem[(register_count * 2) - 1] = start_address;
	  record_buf_mem[(register_count * 2) - 2] = 4;
	  start_address = start_address + 4;
	  register_count--;
	}
    }
  else if (0x1F == opcode1)
    {
      /* Handle arm syscall insn.  */
      if (tdep->arm_syscall_record != NULL)
	{
	  /* The Thumb EABI syscall number lives in r7.  */
	  regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
	  ret = tdep->arm_syscall_record (reg_cache, u_regval);
	}
      else
	{
	  printf_unfiltered (_("no syscall record support\n"));
	  return -1;
	}
    }

  /* B (1), conditional branch is automatically taken care in process_record,
    as PC is saved there.  */

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return ret;
}
12076
12077 /* Handling opcode 111 insns. */
12078
12079 static int
12080 thumb_record_branch (insn_decode_record *thumb_insn_r)
12081 {
12082 uint32_t record_buf[8];
12083 uint32_t bits_h = 0;
12084
12085 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12086
12087 if (2 == bits_h || 3 == bits_h)
12088 {
12089 /* BL */
12090 record_buf[0] = ARM_LR_REGNUM;
12091 thumb_insn_r->reg_rec_count = 1;
12092 }
12093 else if (1 == bits_h)
12094 {
12095 /* BLX(1). */
12096 record_buf[0] = ARM_PS_REGNUM;
12097 record_buf[1] = ARM_LR_REGNUM;
12098 thumb_insn_r->reg_rec_count = 2;
12099 }
12100
12101 /* B(2) is automatically taken care in process_record, as PC is
12102 saved there. */
12103
12104 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12105
12106 return 0;
12107 }
12108
/* Handler for thumb2 load/store multiple instructions.

   Covers RFE/SRS and the LDM/STM families (increment-after and
   decrement-before forms).  Records the loaded registers, or the
   memory range overwritten by the stores plus the written-back base
   register.  Returns ARM_RECORD_SUCCESS, or the unsupported-insn
   result for SRS.  */

static int
thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, op;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  op = bits (thumb2_insn_r->arm_insn, 23, 24);

  if (0 == op || 3 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle RFE instruction.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* Handle SRS instruction after reading banked SP.  */
	  return arm_record_unsupported_insn (thumb2_insn_r);
	}
    }
  else if (1 == op || 2 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions.  Every
	     listed register is overwritten, plus the base register and
	     the flags.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;

	      register_count++;
	      register_bits = register_bits >> 1;
	    }
	  record_buf[index++] = reg_rn;
	  record_buf[index++] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = index;
	}
      else
	{
	  /* Handle STM/STMIA/STMEA and STMDB/STMFD.  Count the listed
	     registers to size the written memory range.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;

	      register_bits = register_bits >> 1;
	    }

	  if (1 == op)
	    {
	      /* Start address calculation for STMIA/STMEA
		 (increment-after): stores begin at Rn.  */
	      start_address = u_regval;
	    }
	  else if (2 == op)
	    {
	      /* Start address calculation for STMDB/STMFD
		 (decrement-before): stores begin below Rn.  */
	      start_address = u_regval - register_count * 4;
	    }

	  thumb2_insn_r->mem_rec_count = register_count;
	  while (register_count)
	    {
	      record_buf_mem[register_count * 2 - 1] = start_address;
	      record_buf_mem[register_count * 2 - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  /* The base register (write-back) and flags are recorded
	     unconditionally here — conservative but safe.  */
	  record_buf[0] = reg_rn;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}
    }

  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  return ARM_RECORD_SUCCESS;
}
12202
/* Handler for thumb2 load/store (dual/exclusive) and table branch
   instructions.  */

static int
thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rd, reg_rn, offset_imm;
  uint32_t reg_dest1, reg_dest2;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2, op3;

  ULONGEST u_regval[2];

  /* Opcode fields; note the caller has already swapped the two
     halfwords, so the first halfword occupies bits 31-16.  */
  op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
  op3 = bits (thumb2_insn_r->arm_insn, 4, 7);

  if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* Load forms (L bit set): LDREX/LDRD/table-branch.  Only
	 registers are modified, so record destination(s) and CPSR.  */
      if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
	{
	  reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
	  record_buf[0] = reg_dest1;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}

      /* Dual-register loads also clobber a second destination held in
	 bits 8-11.  */
      if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
	{
	  reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
	  record_buf[2] = reg_dest2;
	  thumb2_insn_r->reg_rec_count = 3;
	}
    }
  else
    {
      /* Store forms: memory changes, so record target address(es) and
	 size(s); Rn is the base register.  */
      reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

      if (0 == op1 && 0 == op2)
	{
	  /* Handle STREX.  One word stored at Rn + imm8*4; the status
	     result register Rd (bits 0-3) is also written.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
	  address = u_regval[0] + (offset_imm * 4);
	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = address;
	  thumb2_insn_r->mem_rec_count = 1;
	  reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
	  record_buf[0] = reg_rd;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else if (1 == op1 && 0 == op2)
	{
	  /* STREXB/STREXH/STREXD, distinguished by op3.  All store at
	     [Rn] and write the status register Rd (bits 0-3).  */
	  reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
	  record_buf[0] = reg_rd;
	  thumb2_insn_r->reg_rec_count = 1;
	  address = u_regval[0];
	  record_buf_mem[1] = address;

	  if (4 == op3)
	    {
	      /* Handle STREXB.  */
	      record_buf_mem[0] = 1;
	      thumb2_insn_r->mem_rec_count = 1;
	    }
	  else if (5 == op3)
	    {
	      /* Handle STREXH.  */
	      record_buf_mem[0] = 2 ;
	      thumb2_insn_r->mem_rec_count = 1;
	    }
	  else if (7 == op3)
	    {
	      /* Handle STREXD: two consecutive words, at [Rn] and
		 [Rn + 4].  */
	      address = u_regval[0];
	      record_buf_mem[0] = 4;
	      record_buf_mem[2] = 4;
	      record_buf_mem[3] = address + 4;
	      thumb2_insn_r->mem_rec_count = 2;
	    }
	}
      else
	{
	  /* STRD (immediate): two words stored.  Bit 24 selects the
	     indexed form; bit 23 selects add vs subtract of the
	     scaled imm8 offset.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);

	  if (bit (thumb2_insn_r->arm_insn, 24))
	    {
	      if (bit (thumb2_insn_r->arm_insn, 23))
		offset_addr = u_regval[0] + (offset_imm * 4);
	      else
		offset_addr = u_regval[0] - (offset_imm * 4);

	      address = offset_addr;
	    }
	  else
	    address = u_regval[0];

	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = address;
	  record_buf_mem[2] = 4;
	  record_buf_mem[3] = address + 4;
	  thumb2_insn_r->mem_rec_count = 2;
	  /* The base register may be written back.  */
	  record_buf[0] = reg_rn;
	  thumb2_insn_r->reg_rec_count = 1;
	}
    }

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12319
12320 /* Handler for thumb2 data processing (shift register and modified immediate)
12321 instructions. */
12322
12323 static int
12324 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12325 {
12326 uint32_t reg_rd, op;
12327 uint32_t record_buf[8];
12328
12329 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12330 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12331
12332 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12333 {
12334 record_buf[0] = ARM_PS_REGNUM;
12335 thumb2_insn_r->reg_rec_count = 1;
12336 }
12337 else
12338 {
12339 record_buf[0] = reg_rd;
12340 record_buf[1] = ARM_PS_REGNUM;
12341 thumb2_insn_r->reg_rec_count = 2;
12342 }
12343
12344 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12345 record_buf);
12346 return ARM_RECORD_SUCCESS;
12347 }
12348
12349 /* Generic handler for thumb2 instructions which effect destination and PS
12350 registers. */
12351
12352 static int
12353 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12354 {
12355 uint32_t reg_rd;
12356 uint32_t record_buf[8];
12357
12358 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12359
12360 record_buf[0] = reg_rd;
12361 record_buf[1] = ARM_PS_REGNUM;
12362 thumb2_insn_r->reg_rec_count = 2;
12363
12364 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12365 record_buf);
12366 return ARM_RECORD_SUCCESS;
12367 }
12368
12369 /* Handler for thumb2 branch and miscellaneous control instructions. */
12370
12371 static int
12372 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12373 {
12374 uint32_t op, op1, op2;
12375 uint32_t record_buf[8];
12376
12377 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12378 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12379 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12380
12381 /* Handle MSR insn. */
12382 if (!(op1 & 0x2) && 0x38 == op)
12383 {
12384 if (!(op2 & 0x3))
12385 {
12386 /* CPSR is going to be changed. */
12387 record_buf[0] = ARM_PS_REGNUM;
12388 thumb2_insn_r->reg_rec_count = 1;
12389 }
12390 else
12391 {
12392 arm_record_unsupported_insn(thumb2_insn_r);
12393 return -1;
12394 }
12395 }
12396 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12397 {
12398 /* BLX. */
12399 record_buf[0] = ARM_PS_REGNUM;
12400 record_buf[1] = ARM_LR_REGNUM;
12401 thumb2_insn_r->reg_rec_count = 2;
12402 }
12403
12404 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12405 record_buf);
12406 return ARM_RECORD_SUCCESS;
12407 }
12408
/* Handler for thumb2 store single data item instructions.  */

static int
thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2;

  ULONGEST u_regval[2];

  /* op1 (bits 21-23) encodes the store width/form; op2 (bits 6-11)
     distinguishes register-offset from immediate forms.  */
  op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
  op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

  if (bit (thumb2_insn_r->arm_insn, 23))
    {
      /* T2 encoding.  */
      /* Immediate form: address = Rn + zero-extended imm12.  */
      offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
      offset_addr = u_regval[0] + offset_imm;
      address = offset_addr;
    }
  else
    {
      /* T3 encoding.  */
      if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
	{
	  /* Handle STRB (register).  */
	  /* Address = Rn + (Rm << imm2).  NOTE(review): op1 values 1
	     and 2 presumably correspond to STRH/STR register forms,
	     which share this address computation — confirm against the
	     architecture manual.  */
	  reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
	  regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
	  shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
	  offset_addr = u_regval[1] << shift_imm;
	  address = u_regval[0] + offset_addr;
	}
      else
	{
	  /* Immediate imm8 forms.  Bit 10 set selects the indexed
	     variants; bit 9 chooses add vs subtract of the offset.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
	  if (bit (thumb2_insn_r->arm_insn, 10))
	    {
	      if (bit (thumb2_insn_r->arm_insn, 9))
		offset_addr = u_regval[0] + offset_imm;
	      else
		offset_addr = u_regval[0] - offset_imm;

	      address = offset_addr;
	    }
	  else
	    address = u_regval[0];
	}
    }

  /* Record the store width selected by op1.  */
  switch (op1)
    {
      /* Store byte instructions.  */
      case 4:
      case 0:
	record_buf_mem[0] = 1;
	break;
      /* Store half word instructions.  */
      case 1:
      case 5:
	record_buf_mem[0] = 2;
	break;
      /* Store word instructions.  */
      case 2:
      case 6:
	record_buf_mem[0] = 4;
	break;

      default:
	gdb_assert_not_reached ("no decoding pattern found");
	break;
    }

  record_buf_mem[1] = address;
  thumb2_insn_r->mem_rec_count = 1;
  /* The base register may be written back in the indexed forms.  */
  record_buf[0] = reg_rn;
  thumb2_insn_r->reg_rec_count = 1;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12498
12499 /* Handler for thumb2 load memory hints instructions. */
12500
12501 static int
12502 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12503 {
12504 uint32_t record_buf[8];
12505 uint32_t reg_rt, reg_rn;
12506
12507 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12508 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12509
12510 if (ARM_PC_REGNUM != reg_rt)
12511 {
12512 record_buf[0] = reg_rt;
12513 record_buf[1] = reg_rn;
12514 record_buf[2] = ARM_PS_REGNUM;
12515 thumb2_insn_r->reg_rec_count = 3;
12516
12517 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12518 record_buf);
12519 return ARM_RECORD_SUCCESS;
12520 }
12521
12522 return ARM_RECORD_FAILURE;
12523 }
12524
12525 /* Handler for thumb2 load word instructions. */
12526
12527 static int
12528 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12529 {
12530 uint32_t record_buf[8];
12531
12532 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12533 record_buf[1] = ARM_PS_REGNUM;
12534 thumb2_insn_r->reg_rec_count = 2;
12535
12536 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12537 record_buf);
12538 return ARM_RECORD_SUCCESS;
12539 }
12540
12541 /* Handler for thumb2 long multiply, long multiply accumulate, and
12542 divide instructions. */
12543
12544 static int
12545 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12546 {
12547 uint32_t opcode1 = 0, opcode2 = 0;
12548 uint32_t record_buf[8];
12549
12550 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12551 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12552
12553 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12554 {
12555 /* Handle SMULL, UMULL, SMULAL. */
12556 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12557 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12558 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12559 record_buf[2] = ARM_PS_REGNUM;
12560 thumb2_insn_r->reg_rec_count = 3;
12561 }
12562 else if (1 == opcode1 || 3 == opcode2)
12563 {
12564 /* Handle SDIV and UDIV. */
12565 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12566 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12567 record_buf[2] = ARM_PS_REGNUM;
12568 thumb2_insn_r->reg_rec_count = 3;
12569 }
12570 else
12571 return ARM_RECORD_FAILURE;
12572
12573 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12574 record_buf);
12575 return ARM_RECORD_SUCCESS;
12576 }
12577
12578 /* Record handler for thumb32 coprocessor instructions. */
12579
12580 static int
12581 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12582 {
12583 if (bit (thumb2_insn_r->arm_insn, 25))
12584 return arm_record_coproc_data_proc (thumb2_insn_r);
12585 else
12586 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12587 }
12588
12589 /* Record handler for advance SIMD structure load/store instructions. */
12590
12591 static int
12592 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12593 {
12594 struct regcache *reg_cache = thumb2_insn_r->regcache;
12595 uint32_t l_bit, a_bit, b_bits;
12596 uint32_t record_buf[128], record_buf_mem[128];
12597 uint32_t reg_rn, reg_vd, address, f_elem;
12598 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12599 uint8_t f_ebytes;
12600
12601 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12602 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12603 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12604 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12605 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12606 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12607 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12608 f_elem = 8 / f_ebytes;
12609
12610 if (!l_bit)
12611 {
12612 ULONGEST u_regval = 0;
12613 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12614 address = u_regval;
12615
12616 if (!a_bit)
12617 {
12618 /* Handle VST1. */
12619 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12620 {
12621 if (b_bits == 0x07)
12622 bf_regs = 1;
12623 else if (b_bits == 0x0a)
12624 bf_regs = 2;
12625 else if (b_bits == 0x06)
12626 bf_regs = 3;
12627 else if (b_bits == 0x02)
12628 bf_regs = 4;
12629 else
12630 bf_regs = 0;
12631
12632 for (index_r = 0; index_r < bf_regs; index_r++)
12633 {
12634 for (index_e = 0; index_e < f_elem; index_e++)
12635 {
12636 record_buf_mem[index_m++] = f_ebytes;
12637 record_buf_mem[index_m++] = address;
12638 address = address + f_ebytes;
12639 thumb2_insn_r->mem_rec_count += 1;
12640 }
12641 }
12642 }
12643 /* Handle VST2. */
12644 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12645 {
12646 if (b_bits == 0x09 || b_bits == 0x08)
12647 bf_regs = 1;
12648 else if (b_bits == 0x03)
12649 bf_regs = 2;
12650 else
12651 bf_regs = 0;
12652
12653 for (index_r = 0; index_r < bf_regs; index_r++)
12654 for (index_e = 0; index_e < f_elem; index_e++)
12655 {
12656 for (loop_t = 0; loop_t < 2; loop_t++)
12657 {
12658 record_buf_mem[index_m++] = f_ebytes;
12659 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12660 thumb2_insn_r->mem_rec_count += 1;
12661 }
12662 address = address + (2 * f_ebytes);
12663 }
12664 }
12665 /* Handle VST3. */
12666 else if ((b_bits & 0x0e) == 0x04)
12667 {
12668 for (index_e = 0; index_e < f_elem; index_e++)
12669 {
12670 for (loop_t = 0; loop_t < 3; loop_t++)
12671 {
12672 record_buf_mem[index_m++] = f_ebytes;
12673 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12674 thumb2_insn_r->mem_rec_count += 1;
12675 }
12676 address = address + (3 * f_ebytes);
12677 }
12678 }
12679 /* Handle VST4. */
12680 else if (!(b_bits & 0x0e))
12681 {
12682 for (index_e = 0; index_e < f_elem; index_e++)
12683 {
12684 for (loop_t = 0; loop_t < 4; loop_t++)
12685 {
12686 record_buf_mem[index_m++] = f_ebytes;
12687 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12688 thumb2_insn_r->mem_rec_count += 1;
12689 }
12690 address = address + (4 * f_ebytes);
12691 }
12692 }
12693 }
12694 else
12695 {
12696 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12697
12698 if (bft_size == 0x00)
12699 f_ebytes = 1;
12700 else if (bft_size == 0x01)
12701 f_ebytes = 2;
12702 else if (bft_size == 0x02)
12703 f_ebytes = 4;
12704 else
12705 f_ebytes = 0;
12706
12707 /* Handle VST1. */
12708 if (!(b_bits & 0x0b) || b_bits == 0x08)
12709 thumb2_insn_r->mem_rec_count = 1;
12710 /* Handle VST2. */
12711 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12712 thumb2_insn_r->mem_rec_count = 2;
12713 /* Handle VST3. */
12714 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12715 thumb2_insn_r->mem_rec_count = 3;
12716 /* Handle VST4. */
12717 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12718 thumb2_insn_r->mem_rec_count = 4;
12719
12720 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12721 {
12722 record_buf_mem[index_m] = f_ebytes;
12723 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12724 }
12725 }
12726 }
12727 else
12728 {
12729 if (!a_bit)
12730 {
12731 /* Handle VLD1. */
12732 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12733 thumb2_insn_r->reg_rec_count = 1;
12734 /* Handle VLD2. */
12735 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12736 thumb2_insn_r->reg_rec_count = 2;
12737 /* Handle VLD3. */
12738 else if ((b_bits & 0x0e) == 0x04)
12739 thumb2_insn_r->reg_rec_count = 3;
12740 /* Handle VLD4. */
12741 else if (!(b_bits & 0x0e))
12742 thumb2_insn_r->reg_rec_count = 4;
12743 }
12744 else
12745 {
12746 /* Handle VLD1. */
12747 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12748 thumb2_insn_r->reg_rec_count = 1;
12749 /* Handle VLD2. */
12750 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12751 thumb2_insn_r->reg_rec_count = 2;
12752 /* Handle VLD3. */
12753 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12754 thumb2_insn_r->reg_rec_count = 3;
12755 /* Handle VLD4. */
12756 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12757 thumb2_insn_r->reg_rec_count = 4;
12758
12759 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12760 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12761 }
12762 }
12763
12764 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12765 {
12766 record_buf[index_r] = reg_rn;
12767 thumb2_insn_r->reg_rec_count += 1;
12768 }
12769
12770 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12771 record_buf);
12772 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12773 record_buf_mem);
12774 return 0;
12775 }
12776
/* Decodes thumb2 instruction type and invokes its record handler.  */

static unsigned int
thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
{
  uint32_t op, op1, op2;

  /* The caller has already swapped the two halfwords, so the first
     halfword of the thumb2 instruction occupies bits 31-16.  */
  op = bit (thumb2_insn_r->arm_insn, 15);
  op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 26);

  if (op1 == 0x01)
    {
      if (!(op2 & 0x64 ))
	{
	  /* Load/store multiple instruction.  */
	  return thumb2_record_ld_st_multiple (thumb2_insn_r);
	}
      else if (!((op2 & 0x64) ^ 0x04))
	{
	  /* Load/store (dual/exclusive) and table branch instruction.  */
	  return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
	}
      else if (!((op2 & 0x20) ^ 0x20))
	{
	  /* Data-processing (shifted register).  */
	  return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
	}
      else if (op2 & 0x40)
	{
	  /* Co-processor instructions.  */
	  return thumb2_record_coproc_insn (thumb2_insn_r);
	}
    }
  else if (op1 == 0x02)
    {
      if (op)
	{
	  /* Branches and miscellaneous control instructions.  */
	  return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
	}
      else if (op2 & 0x20)
	{
	  /* Data-processing (plain binary immediate) instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else
	{
	  /* Data-processing (modified immediate).  */
	  return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
	}
    }
  else if (op1 == 0x03)
    {
      if (!(op2 & 0x71 ))
	{
	  /* Store single data item.  */
	  return thumb2_record_str_single_data (thumb2_insn_r);
	}
      else if (!((op2 & 0x71) ^ 0x10))
	{
	  /* Advanced SIMD or structure load/store instructions.  */
	  return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x01))
	{
	  /* Load byte, memory hints instruction.  */
	  return thumb2_record_ld_mem_hints (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x03))
	{
	  /* Load halfword, memory hints instruction.  */
	  return thumb2_record_ld_mem_hints (thumb2_insn_r);
	}
      else if (!((op2 & 0x67) ^ 0x05))
	{
	  /* Load word instruction.  */
	  return thumb2_record_ld_word (thumb2_insn_r);
	}
      else if (!((op2 & 0x70) ^ 0x20))
	{
	  /* Data-processing (register) instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else if (!((op2 & 0x78) ^ 0x30))
	{
	  /* Multiply, multiply accumulate, abs diff instruction.  */
	  return thumb2_record_ps_dest_generic (thumb2_insn_r);
	}
      else if (!((op2 & 0x78) ^ 0x38))
	{
	  /* Long multiply, long multiply accumulate, and divide.  */
	  return thumb2_record_lmul_lmla_div (thumb2_insn_r);
	}
      else if (op2 & 0x40)
	{
	  /* Co-processor instructions.  */
	  return thumb2_record_coproc_insn (thumb2_insn_r);
	}
    }

  /* Undecoded instruction.  NOTE(review): the return type is unsigned
     yet -1 is returned here; callers only compare the result against
     ARM_RECORD_SUCCESS so this works, but a signed return type would
     be clearer.  */
  return -1;
}
12880
12881 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
12882 and positive val on fauilure. */
12883
12884 static int
12885 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12886 {
12887 gdb_byte buf[insn_size];
12888
12889 memset (&buf[0], 0, insn_size);
12890
12891 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12892 return 1;
12893 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12894 insn_size,
12895 gdbarch_byte_order_for_code (insn_record->gdbarch));
12896 return 0;
12897 }
12898
/* Function-pointer type shared by all arm/thumb record handlers.  */
typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12900
12901 /* Decode arm/thumb insn depending on condition cods and opcodes; and
12902 dispatch it. */
12903
12904 static int
12905 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
12906 uint32_t insn_size)
12907 {
12908
12909 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
12910 instruction. */
12911 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12912 {
12913 arm_record_data_proc_misc_ld_str, /* 000. */
12914 arm_record_data_proc_imm, /* 001. */
12915 arm_record_ld_st_imm_offset, /* 010. */
12916 arm_record_ld_st_reg_offset, /* 011. */
12917 arm_record_ld_st_multiple, /* 100. */
12918 arm_record_b_bl, /* 101. */
12919 arm_record_asimd_vfp_coproc, /* 110. */
12920 arm_record_coproc_data_proc /* 111. */
12921 };
12922
12923 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
12924 instruction. */
12925 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12926 { \
12927 thumb_record_shift_add_sub, /* 000. */
12928 thumb_record_add_sub_cmp_mov, /* 001. */
12929 thumb_record_ld_st_reg_offset, /* 010. */
12930 thumb_record_ld_st_imm_offset, /* 011. */
12931 thumb_record_ld_st_stack, /* 100. */
12932 thumb_record_misc, /* 101. */
12933 thumb_record_ldm_stm_swi, /* 110. */
12934 thumb_record_branch /* 111. */
12935 };
12936
12937 uint32_t ret = 0; /* return value: negative:failure 0:success. */
12938 uint32_t insn_id = 0;
12939
12940 if (extract_arm_insn (arm_record, insn_size))
12941 {
12942 if (record_debug)
12943 {
12944 printf_unfiltered (_("Process record: error reading memory at "
12945 "addr %s len = %d.\n"),
12946 paddress (arm_record->gdbarch,
12947 arm_record->this_addr), insn_size);
12948 }
12949 return -1;
12950 }
12951 else if (ARM_RECORD == record_type)
12952 {
12953 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12954 insn_id = bits (arm_record->arm_insn, 25, 27);
12955
12956 if (arm_record->cond == 0xf)
12957 ret = arm_record_extension_space (arm_record);
12958 else
12959 {
12960 /* If this insn has fallen into extension space
12961 then we need not decode it anymore. */
12962 ret = arm_handle_insn[insn_id] (arm_record);
12963 }
12964 if (ret != ARM_RECORD_SUCCESS)
12965 {
12966 arm_record_unsupported_insn (arm_record);
12967 ret = -1;
12968 }
12969 }
12970 else if (THUMB_RECORD == record_type)
12971 {
12972 /* As thumb does not have condition codes, we set negative. */
12973 arm_record->cond = -1;
12974 insn_id = bits (arm_record->arm_insn, 13, 15);
12975 ret = thumb_handle_insn[insn_id] (arm_record);
12976 if (ret != ARM_RECORD_SUCCESS)
12977 {
12978 arm_record_unsupported_insn (arm_record);
12979 ret = -1;
12980 }
12981 }
12982 else if (THUMB2_RECORD == record_type)
12983 {
12984 /* As thumb does not have condition codes, we set negative. */
12985 arm_record->cond = -1;
12986
12987 /* Swap first half of 32bit thumb instruction with second half. */
12988 arm_record->arm_insn
12989 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
12990
12991 ret = thumb2_record_decode_insn_handler (arm_record);
12992
12993 if (ret != ARM_RECORD_SUCCESS)
12994 {
12995 arm_record_unsupported_insn (arm_record);
12996 ret = -1;
12997 }
12998 }
12999 else
13000 {
13001 /* Throw assertion. */
13002 gdb_assert_not_reached ("not a valid instruction, could not decode");
13003 }
13004
13005 return ret;
13006 }
13007
13008
13009 /* Cleans up local record registers and memory allocations. */
13010
13011 static void
13012 deallocate_reg_mem (insn_decode_record *record)
13013 {
13014 xfree (record->arm_regs);
13015 xfree (record->arm_mems);
13016 }
13017
13018
13019 /* Parse the current instruction and record the values of the registers and
13020 memory that will be changed in current instruction to record_arch_list".
13021 Return -1 if something is wrong. */
13022
13023 int
13024 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13025 CORE_ADDR insn_addr)
13026 {
13027
13028 uint32_t no_of_rec = 0;
13029 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13030 ULONGEST t_bit = 0, insn_id = 0;
13031
13032 ULONGEST u_regval = 0;
13033
13034 insn_decode_record arm_record;
13035
13036 memset (&arm_record, 0, sizeof (insn_decode_record));
13037 arm_record.regcache = regcache;
13038 arm_record.this_addr = insn_addr;
13039 arm_record.gdbarch = gdbarch;
13040
13041
13042 if (record_debug > 1)
13043 {
13044 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13045 "addr = %s\n",
13046 paddress (gdbarch, arm_record.this_addr));
13047 }
13048
13049 if (extract_arm_insn (&arm_record, 2))
13050 {
13051 if (record_debug)
13052 {
13053 printf_unfiltered (_("Process record: error reading memory at "
13054 "addr %s len = %d.\n"),
13055 paddress (arm_record.gdbarch,
13056 arm_record.this_addr), 2);
13057 }
13058 return -1;
13059 }
13060
13061 /* Check the insn, whether it is thumb or arm one. */
13062
13063 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13064 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13065
13066
13067 if (!(u_regval & t_bit))
13068 {
13069 /* We are decoding arm insn. */
13070 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13071 }
13072 else
13073 {
13074 insn_id = bits (arm_record.arm_insn, 11, 15);
13075 /* is it thumb2 insn? */
13076 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13077 {
13078 ret = decode_insn (&arm_record, THUMB2_RECORD,
13079 THUMB2_INSN_SIZE_BYTES);
13080 }
13081 else
13082 {
13083 /* We are decoding thumb insn. */
13084 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13085 }
13086 }
13087
13088 if (0 == ret)
13089 {
13090 /* Record registers. */
13091 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13092 if (arm_record.arm_regs)
13093 {
13094 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13095 {
13096 if (record_full_arch_list_add_reg
13097 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13098 ret = -1;
13099 }
13100 }
13101 /* Record memories. */
13102 if (arm_record.arm_mems)
13103 {
13104 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13105 {
13106 if (record_full_arch_list_add_mem
13107 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13108 arm_record.arm_mems[no_of_rec].len))
13109 ret = -1;
13110 }
13111 }
13112
13113 if (record_full_arch_list_add_end ())
13114 ret = -1;
13115 }
13116
13117
13118 deallocate_reg_mem (&arm_record);
13119
13120 return ret;
13121 }